From 935bc284c4bacf323b25425170f6da757398534c Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Mon, 22 May 2017 08:52:08 +0200 Subject: [PATCH 001/745] Update assertj-core from 3.7.0 -> 3.8.0 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 2c04b3516..0aaeaafe7 100644 --- a/pom.xml +++ b/pom.xml @@ -173,7 +173,7 @@ org.assertj assertj-core - 3.7.0 + 3.8.0 test From 02fe3a59f28b6501bfcafc5e34577b95c432173f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Marty?= Date: Tue, 23 May 2017 13:38:00 +0200 Subject: [PATCH 002/745] influxdb-java-182 Allow write precision of TimeUnit other than Nanoseconds --- src/main/java/org/influxdb/InfluxDB.java | 20 + .../java/org/influxdb/dto/BatchPoints.java | 41 +- src/main/java/org/influxdb/dto/Point.java | 20 + .../java/org/influxdb/impl/InfluxDBImpl.java | 35 +- src/test/java/org/influxdb/InfluxDBTest.java | 141 ++++ src/test/java/org/influxdb/dto/PointTest.java | 668 ++++++++++-------- 6 files changed, 635 insertions(+), 290 deletions(-) diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index 2d1793dfb..69c0dbb7c 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -183,6 +183,16 @@ public InfluxDB enableBatch(final int actions, final int flushDuration, final Ti public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final String records); + /** + * Write a set of Points to the influxdb database with the string records. + * + * {@linkplain "https://github.com/influxdb/influxdb/pull/2696"} + * + * @param records + */ + public void write(final String database, final String retentionPolicy, + final ConsistencyLevel consistency, final TimeUnit precision, final String records); + /** * Write a set of Points to the influxdb database with the list of string records. 
* @@ -193,6 +203,16 @@ public void write(final String database, final String retentionPolicy, public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final List records); + /** + * Write a set of Points to the influxdb database with the list of string records. + * + * {@linkplain "https://github.com/influxdb/influxdb/pull/2696"} + * + * @param records + */ + public void write(final String database, final String retentionPolicy, + final ConsistencyLevel consistency, final TimeUnit precision, final List records); + /** * Write a set of Points to the influxdb database with the string records through UDP. * diff --git a/src/main/java/org/influxdb/dto/BatchPoints.java b/src/main/java/org/influxdb/dto/BatchPoints.java index 0f39540d7..8886fea69 100644 --- a/src/main/java/org/influxdb/dto/BatchPoints.java +++ b/src/main/java/org/influxdb/dto/BatchPoints.java @@ -4,6 +4,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.concurrent.TimeUnit; import org.influxdb.InfluxDB.ConsistencyLevel; @@ -27,6 +28,7 @@ public class BatchPoints { private Map tags; private List points; private ConsistencyLevel consistency; + private TimeUnit precision; BatchPoints() { // Only visible in the Builder @@ -52,6 +54,7 @@ public static final class Builder { private final Map tags = Maps.newTreeMap(Ordering.natural()); private final List points = Lists.newArrayList(); private ConsistencyLevel consistency; + private TimeUnit precision; /** * @param database @@ -118,6 +121,16 @@ public Builder consistency(final ConsistencyLevel consistencyLevel) { return this; } + /** + * Set the time precision to use for the whole batch. If unspecified, will default to {@link TimeUnit#NANOSECONDS} + * @param precision + * @return the Builder instance + */ + public Builder precision(final TimeUnit precision) { + this.precision = precision; + return this; + } + /** * Create a new BatchPoints instance. 
* @@ -138,6 +151,10 @@ public BatchPoints build() { this.consistency = ConsistencyLevel.ONE; } batchPoints.setConsistency(this.consistency); + if (null == this.precision) { + this.precision = TimeUnit.NANOSECONDS; + } + batchPoints.setPrecision(this.precision); return batchPoints; } } @@ -187,6 +204,20 @@ void setPoints(final List points) { this.points = points; } + /** + * @return the time precision unit + */ + public TimeUnit getPrecision() { + return precision; + } + + /** + * @param precision the time precision to set for the batch points + */ + public void setPrecision(TimeUnit precision) { + this.precision = precision; + } + /** * Add a single Point to these batches. * @@ -242,12 +273,13 @@ public boolean equals(final Object o) { && Objects.equals(retentionPolicy, that.retentionPolicy) && Objects.equals(tags, that.tags) && Objects.equals(points, that.points) - && consistency == that.consistency; + && consistency == that.consistency + && precision == that.precision; } @Override public int hashCode() { - return Objects.hash(database, retentionPolicy, tags, points, consistency); + return Objects.hash(database, retentionPolicy, tags, points, consistency, precision); } /** @@ -264,6 +296,8 @@ public String toString() { builder.append(this.consistency); builder.append(", tags="); builder.append(this.tags); + builder.append(", precision="); + builder.append(this.precision); builder.append(", points="); builder.append(this.points); builder.append("]"); @@ -278,8 +312,9 @@ public String toString() { */ public String lineProtocol() { StringBuilder sb = new StringBuilder(); + for (Point point : this.points) { - sb.append(point.lineProtocol()).append("\n"); + sb.append(point.lineProtocol(this.precision)).append("\n"); } return sb.toString(); } diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index 760a155ef..a19a18348 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ 
-323,6 +323,20 @@ public String lineProtocol() { return sb.toString(); } + /** + * Calculate the lineprotocol entry for a single point, using a specific {@link TimeUnit} for the timestamp + * @param precision the time precision unit for this point + * @return the String without newLine + */ + public String lineProtocol(final TimeUnit precision) { + final StringBuilder sb = new StringBuilder(); + sb.append(KEY_ESCAPER.escape(this.measurement)); + sb.append(concatenatedTags()); + sb.append(concatenateFields()); + sb.append(formatedTime(precision)); + return sb.toString(); + } + private StringBuilder concatenatedTags() { final StringBuilder sb = new StringBuilder(); for (Entry tag : this.tags.entrySet()) { @@ -380,4 +394,10 @@ private StringBuilder formatedTime() { return sb; } + private StringBuilder formatedTime(TimeUnit precision) { + final StringBuilder sb = new StringBuilder(); + sb.append(" ").append(precision.convert(this.time, this.precision)); + return sb; + } + } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 293909fcb..dd3718618 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -259,31 +259,46 @@ public void write(final BatchPoints batchPoints) { this.password, batchPoints.getDatabase(), batchPoints.getRetentionPolicy(), - TimeUtil.toTimePrecision(TimeUnit.NANOSECONDS), + TimeUtil.toTimePrecision(batchPoints.getPrecision()), batchPoints.getConsistency().value(), lineProtocol)); } + + @Override + public void write(String database, String retentionPolicy, ConsistencyLevel consistency, + TimeUnit precision, String records) { + execute(this.influxDBService.writePoints( + this.username, + this.password, + database, + retentionPolicy, + TimeUtil.toTimePrecision(precision), + consistency.value(), + RequestBody.create(MEDIA_TYPE_STRING, records))); + } + @Override public void write(final String database, final String 
retentionPolicy, final ConsistencyLevel consistency, final String records) { - execute(this.influxDBService.writePoints( - this.username, - this.password, - database, - retentionPolicy, - TimeUtil.toTimePrecision(TimeUnit.NANOSECONDS), - consistency.value(), - RequestBody.create(MEDIA_TYPE_STRING, records))); + write(database, retentionPolicy, consistency, TimeUnit.NANOSECONDS, records); } @Override public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final List records) { + write(database, retentionPolicy, consistency, TimeUnit.NANOSECONDS, records); + } + + + @Override + public void write(String database, String retentionPolicy, ConsistencyLevel consistency, + TimeUnit precision, List records) { final String joinedRecords = Joiner.on("\n").join(records); - write(database, retentionPolicy, consistency, joinedRecords); + write(database, retentionPolicy, consistency, precision, joinedRecords); } + /** * {@inheritDoc} */ diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 132df79ac..44623b3b6 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -1,5 +1,7 @@ package org.influxdb; +import static org.assertj.core.api.Assertions.assertThat; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -19,12 +21,14 @@ import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; import org.influxdb.impl.InfluxDBImpl; +import org.influxdb.impl.TimeUtil; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import com.google.common.collect.Lists; import com.google.common.util.concurrent.Uninterruptibles; /** @@ -348,6 +352,143 @@ public void testWriteMultipleStringDataLines() { this.influxDB.deleteDatabase(dbName); } + /** + * Tests writing points using the time precision feature + * @throws 
Exception + */ + @Test + public void testWriteBatchWithPrecision() throws Exception { + // GIVEN a database and a measurement + String dbName = "precision_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + + String measurement = TestUtils.getRandomMeasurement(); + + // GIVEN a batch of points using second precision + Point p1 = Point + .measurement(measurement) + .addField("foo", 1d) + .tag("device", "one") + .time(1485273600, TimeUnit.SECONDS).build(); // 2017-01-27T16:00:00 + String timeP1 = TimeUtil.toInfluxDBTimeFormat(1485273600000L); + Point p2 = Point + .measurement(measurement) + .addField("foo", 2d) + .tag("device", "two") + .time(1485277200, TimeUnit.SECONDS).build(); // 2017-01-27T17:00:00 + String timeP2 = TimeUtil.toInfluxDBTimeFormat(1485277200000L); + Point p3 = Point + .measurement(measurement) + .addField("foo", 3d) + .tag("device", "three") + .time(1485280800, TimeUnit.SECONDS).build(); // 2017-01-27T18:00:00 + String timeP3 = TimeUtil.toInfluxDBTimeFormat(1485280800000L); + + BatchPoints batchPoints = BatchPoints + .database(dbName) + .retentionPolicy(rp) + .precision(TimeUnit.SECONDS) + .points(p1, p2, p3) + .build(); + + // WHEN I write the batch + this.influxDB.write(batchPoints); + + // THEN the measure points have a timestamp with second precision + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().size()).isEqualTo(3); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0)).isEqualTo(timeP1); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0)).isEqualTo(timeP2); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0)).isEqualTo(timeP3); + + this.influxDB.deleteDatabase(dbName); + } + + @Test + public void 
testWriteBatchWithoutPrecision() throws Exception { + // GIVEN a database and a measurement + String dbName = "precision_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + + String measurement = TestUtils.getRandomMeasurement(); + + // GIVEN a batch of points that has no specific precision + Point p1 = Point + .measurement(measurement) + .addField("foo", 1d) + .tag("device", "one") + .time(1485273600000000100L, TimeUnit.NANOSECONDS).build(); // 2017-01-27T16:00:00.000000100Z + String timeP1 = "2017-01-27T16:00:00.000000100Z"; + Point p2 = Point + .measurement(measurement) + .addField("foo", 2d) + .tag("device", "two") + .time(1485277200000000200L, TimeUnit.NANOSECONDS).build(); // 2017-01-27T17:00:00.000000200Z + String timeP2 = "2017-01-27T17:00:00.000000200Z"; + Point p3 = Point + .measurement(measurement) + .addField("foo", 3d) + .tag("device", "three") + .time(1485280800000000300L, TimeUnit.NANOSECONDS).build(); // 2017-01-27T18:00:00.000000300Z + String timeP3 = "2017-01-27T18:00:00.000000300Z"; + + BatchPoints batchPoints = BatchPoints + .database(dbName) + .retentionPolicy(rp) + .points(p1, p2, p3) + .build(); + + // WHEN I write the batch + this.influxDB.write(batchPoints); + + // THEN the measure points have a timestamp with second precision + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().size()).isEqualTo(3); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0)).isEqualTo(timeP1); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0)).isEqualTo(timeP2); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0)).isEqualTo(timeP3); + + this.influxDB.deleteDatabase(dbName); + } + + @Test + public void 
testWriteRecordsWithPrecision() throws Exception { + // GIVEN a database and a measurement + String dbName = "precision_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + + String measurement = TestUtils.getRandomMeasurement(); + + // GIVEN a set of records using second precision + List records = Lists.newArrayList(); + records.add("cpu,atag=test1 idle=100,usertime=10,system=1 1485273600"); + String timeP1 = TimeUtil.toInfluxDBTimeFormat(1485273600000L); + + records.add("cpu,atag=test2 idle=200,usertime=20,system=2 1485277200"); + String timeP2 = TimeUtil.toInfluxDBTimeFormat(1485277200000L); + + records.add("cpu,atag=test3 idle=300,usertime=30,system=3 1485280800"); + String timeP3 = TimeUtil.toInfluxDBTimeFormat(1485280800000L); + + // WHEN I write the batch + this.influxDB.write(dbName, rp, null, TimeUnit.SECONDS, records); + + // THEN the measure points have a timestamp with second precision + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().size()).isEqualTo(3); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0)).isEqualTo(timeP1); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0)).isEqualTo(timeP2); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0)).isEqualTo(timeP3); + + this.influxDB.deleteDatabase(dbName); + } + /** * Test that creating database which name is composed of numbers only works */ diff --git a/src/test/java/org/influxdb/dto/PointTest.java b/src/test/java/org/influxdb/dto/PointTest.java index 2801f5dab..ac9c45b4b 100644 --- a/src/test/java/org/influxdb/dto/PointTest.java +++ b/src/test/java/org/influxdb/dto/PointTest.java @@ -4,6 +4,7 @@ import java.math.BigDecimal; import java.math.BigInteger; 
+import java.util.Date; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -17,284 +18,397 @@ * Test for the Point DTO. * * @author stefan.majer [at] gmail.com - * */ public class PointTest { - /** - * Test that lineprotocol is conformant to: - * - * https://github.com/influxdb/influxdb/blob/master/tsdb/README.md - * - */ - @Test - public void lineProtocol() { - Point point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", 1.0).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=1.0 1"); - - point = Point.measurement("test,1").time(1, TimeUnit.NANOSECONDS).addField("a", 1.0).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test\\,1 a=1.0 1"); - - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", "A").build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A\" 1"); - - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", "A\"B").build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A\\\"B\" 1"); - - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", "A\"B\"C").build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A\\\"B\\\"C\" 1"); - - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", "A B C").build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A B C\" 1"); - - point = Point - .measurement("test") - .time(1, TimeUnit.NANOSECONDS) - .addField("a", "A\"B") - .addField("b", "D E \"F") - .build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A\\\"B\",b=\"D E \\\"F\" 1"); - - //Integer type - point = Point.measurement("inttest").time(1, TimeUnit.NANOSECONDS).addField("a", (Integer)1).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("inttest a=1i 1"); - - point = Point.measurement("inttest,1").time(1, 
TimeUnit.NANOSECONDS).addField("a", (Integer)1).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("inttest\\,1 a=1i 1"); - - point = Point.measurement("inttest,1").time(1, TimeUnit.NANOSECONDS).addField("a", 1L).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("inttest\\,1 a=1i 1"); - - point = Point.measurement("inttest,1").time(1, TimeUnit.NANOSECONDS).addField("a", BigInteger.valueOf(100)).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("inttest\\,1 a=100i 1"); - } - - /** - * Test for ticket #44 - */ - @Test - public void testTicket44() { - Point point = Point.measurement("test").time(1, TimeUnit.MICROSECONDS).addField("a", 1.0).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=1.0 1000"); - - point = Point.measurement("test").time(1, TimeUnit.MILLISECONDS).addField("a", 1.0).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=1.0 1000000"); - - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", 1.0).build(); - BatchPoints batchPoints = BatchPoints.database("db").point(point).build(); - assertThat(batchPoints.lineProtocol()).asString().isEqualTo("test a=1.0 1\n"); - - point = Point.measurement("test").time(1, TimeUnit.MICROSECONDS).addField("a", 1.0).build(); - batchPoints = BatchPoints.database("db").point(point).build(); - assertThat(batchPoints.lineProtocol()).asString().isEqualTo("test a=1.0 1000\n"); - - point = Point.measurement("test").time(1, TimeUnit.MILLISECONDS).addField("a", 1.0).build(); - batchPoints = BatchPoints.database("db").point(point).build(); - assertThat(batchPoints.lineProtocol()).asString().isEqualTo("test a=1.0 1000000\n"); - - point = Point.measurement("test").addField("a", 1.0).time(1, TimeUnit.MILLISECONDS).build(); - batchPoints = BatchPoints.database("db").build(); - batchPoints = batchPoints.point(point); - assertThat(batchPoints.lineProtocol()).asString().isEqualTo("test a=1.0 1000000\n"); - - } - - /** - * Test 
for ticket #54 - */ - @Test - public void testTicket54() { - Byte byteNumber = 100; - Point point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", byteNumber).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=100i 1"); - - int intNumber = 100000000; - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", intNumber).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000i 1"); - - Integer integerNumber = 100000000; - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", integerNumber).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000i 1"); - - AtomicInteger atomicIntegerNumber = new AtomicInteger(100000000); - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", atomicIntegerNumber).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000i 1"); - - Long longNumber = 1000000000000000000L; - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", longNumber).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=1000000000000000000i 1"); - - AtomicLong atomicLongNumber = new AtomicLong(1000000000000000000L); - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", atomicLongNumber).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=1000000000000000000i 1"); - - BigInteger bigIntegerNumber = BigInteger.valueOf(100000000); - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", bigIntegerNumber).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000i 1"); - - Double doubleNumber = Double.valueOf(100000000.0001); - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", doubleNumber).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000.0001 1"); - - Float floatNumber = 
Float.valueOf(0.1f); - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", floatNumber).build(); - assertThat(point.lineProtocol()).asString().startsWith("test a=0.10"); - - BigDecimal bigDecimalNumber = BigDecimal.valueOf(100000000.00000001); - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", bigDecimalNumber).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000.00000001 1"); - } - - @Test - public void testEscapingOfKeysAndValues() { - // Test escaping of spaces - Point point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).tag("foo", "bar baz").addField( "a", 1.0 ).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test,foo=bar\\ baz a=1.0 1"); - - // Test escaping of commas - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).tag("foo", "bar,baz").addField( "a", 1.0 ).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test,foo=bar\\,baz a=1.0 1"); - - // Test escaping of equals sign - point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).tag("foo", "bar=baz").addField( "a", 1.0 ).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test,foo=bar\\=baz a=1.0 1"); - } - - @Test - public void testDeprecatedFieldMethodOnlyProducesFloatingPointValues() { - - Object[] ints = {(byte) 1, (short) 1, (int) 1, (long) 1, BigInteger.ONE}; - - for (Object intExample : ints) { - Point point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).field("a", intExample ).build(); - assertThat(point.lineProtocol()).asString().isEqualTo("test a=1.0 1"); - } - - } - /** - * Test for issue #117. 
- */ - @Test - public void testIgnoreNullPointerValue() { - // Test omission of null values - Point.Builder pointBuilder = Point.measurement("nulltest").time(1, TimeUnit.NANOSECONDS).tag("foo", "bar"); - - pointBuilder.field("field1", "value1"); - pointBuilder.field("field2", (Number) null); - pointBuilder.field("field3", (Integer) 1); - - Point point = pointBuilder.build(); - - assertThat(point.lineProtocol()).asString().isEqualTo("nulltest,foo=bar field1=\"value1\",field3=1.0 1"); - } - - /** - * Tests for issue #110 - */ - @Test(expected = IllegalArgumentException.class) - public void testAddingTagsWithNullNameThrowsAnError() { - Point.measurement("dontcare").tag(null, "DontCare"); - } - - @Test(expected = IllegalArgumentException.class) - public void testAddingTagsWithNullValueThrowsAnError() { - Point.measurement("dontcare").tag("DontCare", null); - } - - @Test(expected = IllegalArgumentException.class) - public void testAddingMapOfTagsWithNullNameThrowsAnError() { - Map map = Maps.newHashMap(); - map.put(null, "DontCare"); - Point.measurement("dontcare").tag(map); - } - - @Test(expected = IllegalArgumentException.class) - public void testAddingMapOfTagsWithNullValueThrowsAnError() { - Map map = Maps.newHashMap(); - map.put("DontCare", null); - Point.measurement("dontcare").tag(map); - } - - @Test(expected = IllegalArgumentException.class) - public void testNullValueThrowsExceptionViaAddField() { - Point.measurement("dontcare").addField("field", (String) null); - } - - /** - * Tests for issue #266 - */ - @Test - public void testEquals() throws Exception { - // GIVEN two point objects with identical data - Map fields = Maps.newHashMap(); - fields.put("foo", "bar"); - - String measurement = "measurement"; - - TimeUnit precision = TimeUnit.NANOSECONDS; - - Map tags = Maps.newHashMap(); - tags.put("bar", "baz"); - - Long time = System.currentTimeMillis(); - - Point p1 = new Point(); - p1.setFields(fields); - p1.setMeasurement(measurement); - 
p1.setPrecision(precision); - p1.setTags(tags); - p1.setTime(time); - - Point p2 = new Point(); - p2.setFields(fields); - p2.setMeasurement(measurement); - p2.setPrecision(precision); - p2.setTags(tags); - p2.setTime(time); - - // WHEN I call equals on one with the other as arg - boolean equals = p1.equals(p2); - - // THEN equals returns true - assertThat(equals).isEqualTo(true); - } - - @Test - public void testUnEquals() throws Exception { - // GIVEN two point objects with different data - Map fields1 = Maps.newHashMap(); - fields1.put("foo", "bar"); - - Map fields2 = Maps.newHashMap(); - fields2.put("foo", "baz"); - - String measurement = "measurement"; - - TimeUnit precision = TimeUnit.NANOSECONDS; - - Map tags = Maps.newHashMap(); - tags.put("bar", "baz"); - - Long time = System.currentTimeMillis(); - - Point p1 = new Point(); - p1.setFields(fields1); - p1.setMeasurement(measurement); - p1.setPrecision(precision); - p1.setTags(tags); - p1.setTime(time); - - Point p2 = new Point(); - p2.setFields(fields2); - p2.setMeasurement(measurement); - p2.setPrecision(precision); - p2.setTags(tags); - p2.setTime(time); - - // WHEN I call equals on one with the other as arg - boolean equals = p1.equals(p2); - - // THEN equals returns true - assertThat(equals).isEqualTo(false); - } + /** + * Test that lineprotocol is conformant to: + *

+ * https://github.com/influxdb/influxdb/blob/master/tsdb/README.md + */ + @Test + public void lineProtocol() { + Point point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", 1.0).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=1.0 1"); + + point = Point.measurement("test,1").time(1, TimeUnit.NANOSECONDS).addField("a", 1.0).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test\\,1 a=1.0 1"); + + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", "A").build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A\" 1"); + + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", "A\"B").build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A\\\"B\" 1"); + + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", "A\"B\"C").build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A\\\"B\\\"C\" 1"); + + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", "A B C").build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A B C\" 1"); + + point = Point + .measurement("test") + .time(1, TimeUnit.NANOSECONDS) + .addField("a", "A\"B") + .addField("b", "D E \"F") + .build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=\"A\\\"B\",b=\"D E \\\"F\" 1"); + + //Integer type + point = Point.measurement("inttest").time(1, TimeUnit.NANOSECONDS).addField("a", (Integer) 1).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("inttest a=1i 1"); + + point = Point.measurement("inttest,1").time(1, TimeUnit.NANOSECONDS).addField("a", (Integer) 1).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("inttest\\,1 a=1i 1"); + + point = Point.measurement("inttest,1").time(1, TimeUnit.NANOSECONDS).addField("a", 1L).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("inttest\\,1 a=1i 1"); + + point = 
Point.measurement("inttest,1").time(1, TimeUnit.NANOSECONDS).addField("a", BigInteger.valueOf(100)) + .build(); + assertThat(point.lineProtocol()).asString().isEqualTo("inttest\\,1 a=100i 1"); + } + + /** + * Test for ticket #44 + */ + @Test + public void testTicket44() { + Point point = Point.measurement("test").time(1, TimeUnit.MICROSECONDS).addField("a", 1.0).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=1.0 1000"); + + point = Point.measurement("test").time(1, TimeUnit.MILLISECONDS).addField("a", 1.0).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=1.0 1000000"); + + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", 1.0).build(); + BatchPoints batchPoints = BatchPoints.database("db").point(point).build(); + assertThat(batchPoints.lineProtocol()).asString().isEqualTo("test a=1.0 1\n"); + + point = Point.measurement("test").time(1, TimeUnit.MICROSECONDS).addField("a", 1.0).build(); + batchPoints = BatchPoints.database("db").point(point).build(); + assertThat(batchPoints.lineProtocol()).asString().isEqualTo("test a=1.0 1000\n"); + + point = Point.measurement("test").time(1, TimeUnit.MILLISECONDS).addField("a", 1.0).build(); + batchPoints = BatchPoints.database("db").point(point).build(); + assertThat(batchPoints.lineProtocol()).asString().isEqualTo("test a=1.0 1000000\n"); + + point = Point.measurement("test").addField("a", 1.0).time(1, TimeUnit.MILLISECONDS).build(); + batchPoints = BatchPoints.database("db").build(); + batchPoints = batchPoints.point(point); + assertThat(batchPoints.lineProtocol()).asString().isEqualTo("test a=1.0 1000000\n"); + + } + + /** + * Test for ticket #54 + */ + @Test + public void testTicket54() { + Byte byteNumber = 100; + Point point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", byteNumber).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=100i 1"); + + int intNumber = 100000000; + point = 
Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", intNumber).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000i 1"); + + Integer integerNumber = 100000000; + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", integerNumber).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000i 1"); + + AtomicInteger atomicIntegerNumber = new AtomicInteger(100000000); + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", atomicIntegerNumber).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000i 1"); + + Long longNumber = 1000000000000000000L; + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", longNumber).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=1000000000000000000i 1"); + + AtomicLong atomicLongNumber = new AtomicLong(1000000000000000000L); + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", atomicLongNumber).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=1000000000000000000i 1"); + + BigInteger bigIntegerNumber = BigInteger.valueOf(100000000); + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", bigIntegerNumber).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000i 1"); + + Double doubleNumber = Double.valueOf(100000000.0001); + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", doubleNumber).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000.0001 1"); + + Float floatNumber = Float.valueOf(0.1f); + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", floatNumber).build(); + assertThat(point.lineProtocol()).asString().startsWith("test a=0.10"); + + BigDecimal bigDecimalNumber = BigDecimal.valueOf(100000000.00000001); + point = Point.measurement("test").time(1, 
TimeUnit.NANOSECONDS).addField("a", bigDecimalNumber).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=100000000.00000001 1"); + } + + @Test + public void testEscapingOfKeysAndValues() { + // Test escaping of spaces + Point point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).tag("foo", "bar baz").addField("a", 1.0) + .build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test,foo=bar\\ baz a=1.0 1"); + + // Test escaping of commas + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).tag("foo", "bar,baz").addField("a", 1.0) + .build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test,foo=bar\\,baz a=1.0 1"); + + // Test escaping of equals sign + point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).tag("foo", "bar=baz").addField("a", 1.0) + .build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test,foo=bar\\=baz a=1.0 1"); + } + + @Test + public void testDeprecatedFieldMethodOnlyProducesFloatingPointValues() { + + Object[] ints = { (byte) 1, (short) 1, (int) 1, (long) 1, BigInteger.ONE }; + + for (Object intExample : ints) { + Point point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).field("a", intExample).build(); + assertThat(point.lineProtocol()).asString().isEqualTo("test a=1.0 1"); + } + + } + + /** + * Test for issue #117. 
+ */ + @Test + public void testIgnoreNullPointerValue() { + // Test omission of null values + Point.Builder pointBuilder = Point.measurement("nulltest").time(1, TimeUnit.NANOSECONDS).tag("foo", "bar"); + + pointBuilder.field("field1", "value1"); + pointBuilder.field("field2", (Number) null); + pointBuilder.field("field3", (Integer) 1); + + Point point = pointBuilder.build(); + + assertThat(point.lineProtocol()).asString().isEqualTo("nulltest,foo=bar field1=\"value1\",field3=1.0 1"); + } + + /** + * Tests for issue #110 + */ + @Test(expected = IllegalArgumentException.class) + public void testAddingTagsWithNullNameThrowsAnError() { + Point.measurement("dontcare").tag(null, "DontCare"); + } + + @Test(expected = IllegalArgumentException.class) + public void testAddingTagsWithNullValueThrowsAnError() { + Point.measurement("dontcare").tag("DontCare", null); + } + + @Test(expected = IllegalArgumentException.class) + public void testAddingMapOfTagsWithNullNameThrowsAnError() { + Map map = Maps.newHashMap(); + map.put(null, "DontCare"); + Point.measurement("dontcare").tag(map); + } + + @Test(expected = IllegalArgumentException.class) + public void testAddingMapOfTagsWithNullValueThrowsAnError() { + Map map = Maps.newHashMap(); + map.put("DontCare", null); + Point.measurement("dontcare").tag(map); + } + + @Test(expected = IllegalArgumentException.class) + public void testNullValueThrowsExceptionViaAddField() { + Point.measurement("dontcare").addField("field", (String) null); + } + + /** + * Tests for issue #266 + */ + @Test + public void testEquals() throws Exception { + // GIVEN two point objects with identical data + Map fields = Maps.newHashMap(); + fields.put("foo", "bar"); + + String measurement = "measurement"; + + TimeUnit precision = TimeUnit.NANOSECONDS; + + Map tags = Maps.newHashMap(); + tags.put("bar", "baz"); + + Long time = System.currentTimeMillis(); + + Point p1 = new Point(); + p1.setFields(fields); + p1.setMeasurement(measurement); + 
p1.setPrecision(precision); + p1.setTags(tags); + p1.setTime(time); + + Point p2 = new Point(); + p2.setFields(fields); + p2.setMeasurement(measurement); + p2.setPrecision(precision); + p2.setTags(tags); + p2.setTime(time); + + // WHEN I call equals on one with the other as arg + boolean equals = p1.equals(p2); + + // THEN equals returns true + assertThat(equals).isEqualTo(true); + } + + @Test + public void testUnEquals() throws Exception { + // GIVEN two point objects with different data + Map fields1 = Maps.newHashMap(); + fields1.put("foo", "bar"); + + Map fields2 = Maps.newHashMap(); + fields2.put("foo", "baz"); + + String measurement = "measurement"; + + TimeUnit precision = TimeUnit.NANOSECONDS; + + Map tags = Maps.newHashMap(); + tags.put("bar", "baz"); + + Long time = System.currentTimeMillis(); + + Point p1 = new Point(); + p1.setFields(fields1); + p1.setMeasurement(measurement); + p1.setPrecision(precision); + p1.setTags(tags); + p1.setTime(time); + + Point p2 = new Point(); + p2.setFields(fields2); + p2.setMeasurement(measurement); + p2.setPrecision(precision); + p2.setTags(tags); + p2.setTime(time); + + // WHEN I call equals on one with the other as arg + boolean equals = p1.equals(p2); + + // THEN equals returns false + assertThat(equals).isEqualTo(false); + } + + /** + * Tests for #182 + * + * @throws Exception + */ + @Test + public void testLineProtocolNanosecondPrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.NANOSECONDS) + String nanosTime = p.lineProtocol(TimeUnit.NANOSECONDS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in nanoseconds + assertThat(nanosTime).isEqualTo(String.valueOf(pDate.getTime() * 1000000)); + } + + @Test + public void testLineProtocolMicrosecondPrecision() throws Exception { + 
// GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.MICROSECONDS) + String microsTime = p.lineProtocol(TimeUnit.MICROSECONDS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in microseconds + assertThat(microsTime).isEqualTo(String.valueOf(pDate.getTime() * 1000)); + } + + @Test + public void testLineProtocolMillisecondPrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.MILLISECONDS) + String millisTime = p.lineProtocol(TimeUnit.MILLISECONDS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in milliseconds + assertThat(millisTime).isEqualTo(String.valueOf(pDate.getTime())); + } + + @Test + public void testLineProtocolSecondPrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.SECONDS) + String secondTime = p.lineProtocol(TimeUnit.SECONDS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in seconds + String expectedSecondTimeStamp = String.valueOf(pDate.getTime() / 1000); + assertThat(secondTime).isEqualTo(expectedSecondTimeStamp); + } + + @Test + public void testLineProtocolMinutePrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.MINUTE) + 
String secondTime = p.lineProtocol(TimeUnit.MINUTES).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in minutes + String expectedSecondTimeStamp = String.valueOf(pDate.getTime() / 60000); + assertThat(secondTime).isEqualTo(expectedSecondTimeStamp); + } + + @Test + public void testLineProtocolHourPrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.HOURS) + String hourTime = p.lineProtocol(TimeUnit.HOURS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in hours + String expectedHourTimeStamp = String.valueOf(Math.round(pDate.getTime() / 3600000)); // 1000ms * 60s * 60m + assertThat(hourTime).isEqualTo(expectedHourTimeStamp); + } } From 237c9260e83423e63c96911bc071915488790732 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Marty?= Date: Tue, 23 May 2017 13:38:00 +0200 Subject: [PATCH 003/745] influxdb-java-182 Allow write precision of TimeUnit other than Nanoseconds --- src/main/java/org/influxdb/InfluxDB.java | 20 +++ .../java/org/influxdb/dto/BatchPoints.java | 41 ++++- src/main/java/org/influxdb/dto/Point.java | 20 +++ .../java/org/influxdb/impl/InfluxDBImpl.java | 35 +++-- src/test/java/org/influxdb/InfluxDBTest.java | 141 ++++++++++++++++++ src/test/java/org/influxdb/dto/PointTest.java | 117 ++++++++++++++- 6 files changed, 358 insertions(+), 16 deletions(-) diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index cad445c05..b98622e66 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -197,6 +197,16 @@ public InfluxDB enableBatch(final int actions, final int flushDuration, final Ti public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, 
final String records); + /** + * Write a set of Points to the influxdb database with the string records. + * + * {@linkplain "https://github.com/influxdb/influxdb/pull/2696"} + * + * @param records + */ + public void write(final String database, final String retentionPolicy, + final ConsistencyLevel consistency, final TimeUnit precision, final String records); + /** * Write a set of Points to the influxdb database with the list of string records. * @@ -207,6 +217,16 @@ public void write(final String database, final String retentionPolicy, public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final List records); + /** + * Write a set of Points to the influxdb database with the list of string records. + * + * {@linkplain "https://github.com/influxdb/influxdb/pull/2696"} + * + * @param records + */ + public void write(final String database, final String retentionPolicy, + final ConsistencyLevel consistency, final TimeUnit precision, final List records); + /** * Write a set of Points to the influxdb database with the string records through UDP. 
* diff --git a/src/main/java/org/influxdb/dto/BatchPoints.java b/src/main/java/org/influxdb/dto/BatchPoints.java index 0f39540d7..8886fea69 100644 --- a/src/main/java/org/influxdb/dto/BatchPoints.java +++ b/src/main/java/org/influxdb/dto/BatchPoints.java @@ -4,6 +4,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.concurrent.TimeUnit; import org.influxdb.InfluxDB.ConsistencyLevel; @@ -27,6 +28,7 @@ public class BatchPoints { private Map tags; private List points; private ConsistencyLevel consistency; + private TimeUnit precision; BatchPoints() { // Only visible in the Builder @@ -52,6 +54,7 @@ public static final class Builder { private final Map tags = Maps.newTreeMap(Ordering.natural()); private final List points = Lists.newArrayList(); private ConsistencyLevel consistency; + private TimeUnit precision; /** * @param database @@ -118,6 +121,16 @@ public Builder consistency(final ConsistencyLevel consistencyLevel) { return this; } + /** + * Set the time precision to use for the whole batch. If unspecified, will default to {@link TimeUnit#NANOSECONDS} + * @param precision + * @return the Builder instance + */ + public Builder precision(final TimeUnit precision) { + this.precision = precision; + return this; + } + /** * Create a new BatchPoints instance. * @@ -138,6 +151,10 @@ public BatchPoints build() { this.consistency = ConsistencyLevel.ONE; } batchPoints.setConsistency(this.consistency); + if (null == this.precision) { + this.precision = TimeUnit.NANOSECONDS; + } + batchPoints.setPrecision(this.precision); return batchPoints; } } @@ -187,6 +204,20 @@ void setPoints(final List points) { this.points = points; } + /** + * @return the time precision unit + */ + public TimeUnit getPrecision() { + return precision; + } + + /** + * @param precision the time precision to set for the batch points + */ + public void setPrecision(TimeUnit precision) { + this.precision = precision; + } + /** * Add a single Point to these batches. 
* @@ -242,12 +273,13 @@ public boolean equals(final Object o) { && Objects.equals(retentionPolicy, that.retentionPolicy) && Objects.equals(tags, that.tags) && Objects.equals(points, that.points) - && consistency == that.consistency; + && consistency == that.consistency + && precision == that.precision; } @Override public int hashCode() { - return Objects.hash(database, retentionPolicy, tags, points, consistency); + return Objects.hash(database, retentionPolicy, tags, points, consistency, precision); } /** @@ -264,6 +296,8 @@ public String toString() { builder.append(this.consistency); builder.append(", tags="); builder.append(this.tags); + builder.append(", precision="); + builder.append(this.precision); builder.append(", points="); builder.append(this.points); builder.append("]"); @@ -278,8 +312,9 @@ public String toString() { */ public String lineProtocol() { StringBuilder sb = new StringBuilder(); + for (Point point : this.points) { - sb.append(point.lineProtocol()).append("\n"); + sb.append(point.lineProtocol(this.precision)).append("\n"); } return sb.toString(); } diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index d1dd78a7c..c00e9128a 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -328,6 +328,20 @@ public String lineProtocol() { return sb.toString(); } + /** + * Calculate the lineprotocol entry for a single point, using a specific {@link TimeUnit} for the timestamp + * @param precision the time precision unit for this point + * @return the String without newLine + */ + public String lineProtocol(final TimeUnit precision) { + final StringBuilder sb = new StringBuilder(); + sb.append(KEY_ESCAPER.escape(this.measurement)); + sb.append(concatenatedTags()); + sb.append(concatenateFields()); + sb.append(formatedTime(precision)); + return sb.toString(); + } + private StringBuilder concatenatedTags() { final StringBuilder sb = new StringBuilder(); for 
(Entry tag : this.tags.entrySet()) { @@ -385,4 +399,10 @@ private StringBuilder formatedTime() { return sb; } + private StringBuilder formatedTime(TimeUnit precision) { + final StringBuilder sb = new StringBuilder(); + sb.append(" ").append(precision.convert(this.time, this.precision)); + return sb; + } + } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 884787cab..f3ec4632d 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -269,31 +269,46 @@ public void write(final BatchPoints batchPoints) { this.password, batchPoints.getDatabase(), batchPoints.getRetentionPolicy(), - TimeUtil.toTimePrecision(TimeUnit.NANOSECONDS), + TimeUtil.toTimePrecision(batchPoints.getPrecision()), batchPoints.getConsistency().value(), lineProtocol)); } + + @Override + public void write(String database, String retentionPolicy, ConsistencyLevel consistency, + TimeUnit precision, String records) { + execute(this.influxDBService.writePoints( + this.username, + this.password, + database, + retentionPolicy, + TimeUtil.toTimePrecision(precision), + consistency.value(), + RequestBody.create(MEDIA_TYPE_STRING, records))); + } + @Override public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final String records) { - execute(this.influxDBService.writePoints( - this.username, - this.password, - database, - retentionPolicy, - TimeUtil.toTimePrecision(TimeUnit.NANOSECONDS), - consistency.value(), - RequestBody.create(MEDIA_TYPE_STRING, records))); + write(database, retentionPolicy, consistency, TimeUnit.NANOSECONDS, records); } @Override public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final List records) { + write(database, retentionPolicy, consistency, TimeUnit.NANOSECONDS, records); + } + + + @Override + public void write(String database, String 
retentionPolicy, ConsistencyLevel consistency, + TimeUnit precision, List records) { final String joinedRecords = Joiner.on("\n").join(records); - write(database, retentionPolicy, consistency, joinedRecords); + write(database, retentionPolicy, consistency, precision, joinedRecords); } + /** * {@inheritDoc} */ diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 116f42552..0ef5794d3 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -1,5 +1,7 @@ package org.influxdb; +import static org.assertj.core.api.Assertions.assertThat; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -19,12 +21,14 @@ import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; import org.influxdb.impl.InfluxDBImpl; +import org.influxdb.impl.TimeUtil; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import com.google.common.collect.Lists; import com.google.common.util.concurrent.Uninterruptibles; /** @@ -362,6 +366,143 @@ public void testWriteMultipleStringDataLines() { this.influxDB.deleteDatabase(dbName); } + /** + * Tests writing points using the time precision feature + * @throws Exception + */ + @Test + public void testWriteBatchWithPrecision() throws Exception { + // GIVEN a database and a measurement + String dbName = "precision_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + + String measurement = TestUtils.getRandomMeasurement(); + + // GIVEN a batch of points using second precision + Point p1 = Point + .measurement(measurement) + .addField("foo", 1d) + .tag("device", "one") + .time(1485273600, TimeUnit.SECONDS).build(); // 2017-01-27T16:00:00 + String timeP1 = TimeUtil.toInfluxDBTimeFormat(1485273600000L); + Point p2 = Point + 
.measurement(measurement) + .addField("foo", 2d) + .tag("device", "two") + .time(1485277200, TimeUnit.SECONDS).build(); // 2017-01-27T17:00:00 + String timeP2 = TimeUtil.toInfluxDBTimeFormat(1485277200000L); + Point p3 = Point + .measurement(measurement) + .addField("foo", 3d) + .tag("device", "three") + .time(1485280800, TimeUnit.SECONDS).build(); // 2017-01-27T18:00:00 + String timeP3 = TimeUtil.toInfluxDBTimeFormat(1485280800000L); + + BatchPoints batchPoints = BatchPoints + .database(dbName) + .retentionPolicy(rp) + .precision(TimeUnit.SECONDS) + .points(p1, p2, p3) + .build(); + + // WHEN I write the batch + this.influxDB.write(batchPoints); + + // THEN the measure points have a timestamp with second precision + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().size()).isEqualTo(3); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0)).isEqualTo(timeP1); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0)).isEqualTo(timeP2); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0)).isEqualTo(timeP3); + + this.influxDB.deleteDatabase(dbName); + } + + @Test + public void testWriteBatchWithoutPrecision() throws Exception { + // GIVEN a database and a measurement + String dbName = "precision_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + + String measurement = TestUtils.getRandomMeasurement(); + + // GIVEN a batch of points that has no specific precision + Point p1 = Point + .measurement(measurement) + .addField("foo", 1d) + .tag("device", "one") + .time(1485273600000000100L, TimeUnit.NANOSECONDS).build(); // 2017-01-27T16:00:00.000000100Z + String timeP1 = "2017-01-27T16:00:00.000000100Z"; + Point p2 = Point + 
.measurement(measurement) + .addField("foo", 2d) + .tag("device", "two") + .time(1485277200000000200L, TimeUnit.NANOSECONDS).build(); // 2017-01-27T17:00:00.000000200Z + String timeP2 = "2017-01-27T17:00:00.000000200Z"; + Point p3 = Point + .measurement(measurement) + .addField("foo", 3d) + .tag("device", "three") + .time(1485280800000000300L, TimeUnit.NANOSECONDS).build(); // 2017-01-27T18:00:00.000000300Z + String timeP3 = "2017-01-27T18:00:00.000000300Z"; + + BatchPoints batchPoints = BatchPoints + .database(dbName) + .retentionPolicy(rp) + .points(p1, p2, p3) + .build(); + + // WHEN I write the batch + this.influxDB.write(batchPoints); + + // THEN the measure points have a timestamp with the default nanosecond precision + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().size()).isEqualTo(3); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0)).isEqualTo(timeP1); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0)).isEqualTo(timeP2); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0)).isEqualTo(timeP3); + + this.influxDB.deleteDatabase(dbName); + } + + @Test + public void testWriteRecordsWithPrecision() throws Exception { + // GIVEN a database and a measurement + String dbName = "precision_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + + String measurement = TestUtils.getRandomMeasurement(); + + // GIVEN a set of records using second precision + List records = Lists.newArrayList(); + records.add("cpu,atag=test1 idle=100,usertime=10,system=1 1485273600"); + String timeP1 = TimeUtil.toInfluxDBTimeFormat(1485273600000L); + + records.add("cpu,atag=test2 idle=200,usertime=20,system=2 1485277200"); + String timeP2 = 
TimeUtil.toInfluxDBTimeFormat(1485277200000L); + + records.add("cpu,atag=test3 idle=300,usertime=30,system=3 1485280800"); + String timeP3 = TimeUtil.toInfluxDBTimeFormat(1485280800000L); + + // WHEN I write the batch + this.influxDB.write(dbName, rp, null, TimeUnit.SECONDS, records); + + // THEN the measure points have a timestamp with second precision + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().size()).isEqualTo(3); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0)).isEqualTo(timeP1); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0)).isEqualTo(timeP2); + assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0)).isEqualTo(timeP3); + + this.influxDB.deleteDatabase(dbName); + } + /** * Test that creating database which name is composed of numbers only works */ diff --git a/src/test/java/org/influxdb/dto/PointTest.java b/src/test/java/org/influxdb/dto/PointTest.java index 62301db39..fb9c1226e 100644 --- a/src/test/java/org/influxdb/dto/PointTest.java +++ b/src/test/java/org/influxdb/dto/PointTest.java @@ -4,6 +4,7 @@ import java.math.BigDecimal; import java.math.BigInteger; +import java.util.Date; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -314,7 +315,117 @@ public void testUnEquals() throws Exception { // WHEN I call equals on one with the other as arg boolean equals = p1.equals(p2); - // THEN equals returns true - assertThat(equals).isEqualTo(false); - } + // THEN equals returns true + assertThat(equals).isEqualTo(false); + } + + /** + * Tests for #182 + * + * @throws Exception + */ + @Test + public void testLineProtocolNanosecondPrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + 
.measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.NANOSECONDS) + String nanosTime = p.lineProtocol(TimeUnit.NANOSECONDS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in nanoseconds + assertThat(nanosTime).isEqualTo(String.valueOf(pDate.getTime() * 1000000)); + } + + @Test + public void testLineProtocolMicrosecondPrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.MICROSECONDS) + String microsTime = p.lineProtocol(TimeUnit.MICROSECONDS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in microseconds + assertThat(microsTime).isEqualTo(String.valueOf(pDate.getTime() * 1000)); + } + + @Test + public void testLineProtocolMillisecondPrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.MILLISECONDS) + String millisTime = p.lineProtocol(TimeUnit.MILLISECONDS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in milliseconds + assertThat(millisTime).isEqualTo(String.valueOf(pDate.getTime())); + } + + @Test + public void testLineProtocolSecondPrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.SECONDS) + String secondTime = p.lineProtocol(TimeUnit.SECONDS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in seconds + 
String expectedSecondTimeStamp = String.valueOf(pDate.getTime() / 1000); + assertThat(secondTime).isEqualTo(expectedSecondTimeStamp); + } + + @Test + public void testLineProtocolMinutePrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.MINUTE) + String secondTime = p.lineProtocol(TimeUnit.MINUTES).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in minutes + String expectedSecondTimeStamp = String.valueOf(pDate.getTime() / 60000); + assertThat(secondTime).isEqualTo(expectedSecondTimeStamp); + } + + @Test + public void testLineProtocolHourPrecision() throws Exception { + // GIVEN a point with millisecond precision + Date pDate = new Date(); + Point p = Point + .measurement("measurement") + .addField("foo", "bar") + .time(pDate.getTime(), TimeUnit.MILLISECONDS) + .build(); + + // WHEN i call lineProtocol(TimeUnit.HOURS) + String hourTime = p.lineProtocol(TimeUnit.HOURS).replace("measurement foo=\"bar\" ", ""); + + // THEN the timestamp is in hours + String expectedHourTimeStamp = String.valueOf(Math.round(pDate.getTime() / 3600000)); // 1000ms * 60s * 60m + assertThat(hourTime).isEqualTo(expectedHourTimeStamp); + } } From 7c185d1e9c47fa96c3599092f606fd46604749e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Marty?= Date: Tue, 23 May 2017 15:52:52 +0200 Subject: [PATCH 004/745] influxdb-java-182 Allow write precision of TimeUnit other than Nanoseconds checkstyle violations fix --- src/main/java/org/influxdb/dto/BatchPoints.java | 17 ++++++++--------- src/main/java/org/influxdb/dto/Point.java | 14 +++++++------- .../java/org/influxdb/impl/InfluxDBImpl.java | 8 ++++---- 3 files changed, 19 insertions(+), 20 deletions(-) diff --git a/src/main/java/org/influxdb/dto/BatchPoints.java 
b/src/main/java/org/influxdb/dto/BatchPoints.java index 8886fea69..c1bee4383 100644 --- a/src/main/java/org/influxdb/dto/BatchPoints.java +++ b/src/main/java/org/influxdb/dto/BatchPoints.java @@ -1,18 +1,17 @@ package org.influxdb.dto; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.TimeUnit; - -import org.influxdb.InfluxDB.ConsistencyLevel; - import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; +import org.influxdb.InfluxDB.ConsistencyLevel; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.TimeUnit; /** * {Purpose of This Type}. @@ -214,7 +213,7 @@ public TimeUnit getPrecision() { /** * @param precision the time precision to set for the batch points */ - public void setPrecision(TimeUnit precision) { + public void setPrecision(final TimeUnit precision) { this.precision = precision; } diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index c00e9128a..647dbdbd4 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -1,5 +1,10 @@ package org.influxdb.dto; +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; +import com.google.common.escape.Escaper; +import com.google.common.escape.Escapers; + import java.math.BigDecimal; import java.math.BigInteger; import java.text.NumberFormat; @@ -10,11 +15,6 @@ import java.util.TreeMap; import java.util.concurrent.TimeUnit; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.escape.Escaper; -import com.google.common.escape.Escapers; - /** * Representation of a InfluxDB database Point. 
* @@ -329,7 +329,7 @@ public String lineProtocol() { } /** - * Calculate the lineprotocol entry for a single point, using a specific {@link TimeUnit} for the timestamp + * Calculate the lineprotocol entry for a single point, using a specific {@link TimeUnit} for the timestamp. * @param precision the time precision unit for this point * @return the String without newLine */ @@ -399,7 +399,7 @@ private StringBuilder formatedTime() { return sb; } - private StringBuilder formatedTime(TimeUnit precision) { + private StringBuilder formatedTime(final TimeUnit precision) { final StringBuilder sb = new StringBuilder(); sb.append(" ").append(precision.convert(this.time, this.precision)); return sb; diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index f3ec4632d..247adf543 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -276,8 +276,8 @@ public void write(final BatchPoints batchPoints) { @Override - public void write(String database, String retentionPolicy, ConsistencyLevel consistency, - TimeUnit precision, String records) { + public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, + final TimeUnit precision, final String records) { execute(this.influxDBService.writePoints( this.username, this.password, @@ -302,8 +302,8 @@ public void write(final String database, final String retentionPolicy, final Con @Override - public void write(String database, String retentionPolicy, ConsistencyLevel consistency, - TimeUnit precision, List records) { + public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, + final TimeUnit precision, final List records) { final String joinedRecords = Joiner.on("\n").join(records); write(database, retentionPolicy, consistency, precision, joinedRecords); } From 701519796c22bedbd364106c53f5f509af01ab56 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Marty?= Date: Tue, 23 May 2017 16:54:39 +0200 Subject: [PATCH 005/745] influxdb-java-182 Allow write precision of TimeUnit other than Nanoseconds tests failing fixes --- src/test/java/org/influxdb/InfluxDBTest.java | 66 ++++++++++++-------- 1 file changed, 41 insertions(+), 25 deletions(-) diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 0ef5794d3..9ef6ae188 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -3,6 +3,9 @@ import static org.assertj.core.api.Assertions.assertThat; import java.io.IOException; +import java.time.Instant; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -73,7 +76,7 @@ public void setUp() throws InterruptedException, IOException { System.out.println("# Connected to InfluxDB Version: " + this.influxDB.version() + " #"); System.out.println("##################################################################################"); } - + /** * delete UDP database after all tests end. */ @@ -133,7 +136,7 @@ public void testDescribeDatabases() { Assert.assertTrue("It is expected that describeDataBases contents the newly create database.", found); this.influxDB.deleteDatabase(dbName); } - + /** * Test that Database exists works. */ @@ -174,7 +177,7 @@ public void testWrite() { Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); this.influxDB.deleteDatabase(dbName); } - + /** * Test the implementation of {@link InfluxDB#write(int, Point)}'s sync support. */ @@ -189,7 +192,7 @@ public void testSyncWritePointThroughUDP() { QueryResult result = this.influxDB.query(query); Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); } - + /** * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. 
*/ @@ -209,8 +212,8 @@ public void testAsyncWritePointThroughUDP() { this.influxDB.disableBatch(); } } - - + + /** * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. */ @@ -381,24 +384,32 @@ public void testWriteBatchWithPrecision() throws Exception { String measurement = TestUtils.getRandomMeasurement(); // GIVEN a batch of points using second precision + DateTimeFormatter formatter = DateTimeFormatter + .ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'") + .withZone(ZoneId.of("UTC")); + int t1 = 1485273600; Point p1 = Point .measurement(measurement) .addField("foo", 1d) .tag("device", "one") - .time(1485273600, TimeUnit.SECONDS).build(); // 2017-01-27T16:00:00 - String timeP1 = TimeUtil.toInfluxDBTimeFormat(1485273600000L); + .time(t1, TimeUnit.SECONDS).build(); // 2017-01-27T16:00:00 + String timeP1 = formatter.format(Instant.ofEpochSecond(t1)); + + int t2 = 1485277200; Point p2 = Point .measurement(measurement) .addField("foo", 2d) .tag("device", "two") - .time(1485277200, TimeUnit.SECONDS).build(); // 2017-01-27T17:00:00 - String timeP2 = TimeUtil.toInfluxDBTimeFormat(1485277200000L); + .time(t2, TimeUnit.SECONDS).build(); // 2017-01-27T17:00:00 + String timeP2 = formatter.format(Instant.ofEpochSecond(t2)); + + int t3 = 1485280800; Point p3 = Point .measurement(measurement) .addField("foo", 3d) .tag("device", "three") - .time(1485280800, TimeUnit.SECONDS).build(); // 2017-01-27T18:00:00 - String timeP3 = TimeUtil.toInfluxDBTimeFormat(1485280800000L); + .time(t3, TimeUnit.SECONDS).build(); // 2017-01-27T18:00:00 + String timeP3 = formatter.format(Instant.ofEpochSecond(t3)); BatchPoints batchPoints = BatchPoints .database(dbName) @@ -431,24 +442,29 @@ public void testWriteBatchWithoutPrecision() throws Exception { String measurement = TestUtils.getRandomMeasurement(); // GIVEN a batch of points that has no specific precision + long t1 = 1485273600000000100L; Point p1 = Point .measurement(measurement) .addField("foo", 1d) .tag("device", "one") - 
.time(1485273600000000100L, TimeUnit.NANOSECONDS).build(); // 2017-01-27T16:00:00.000000100Z - String timeP1 = "2017-01-27T16:00:00.000000100Z"; + .time(t1, TimeUnit.NANOSECONDS).build(); // 2017-01-27T16:00:00.000000100Z + Double timeP1 = Double.valueOf(t1); + + long t2 = 1485277200000000200L; Point p2 = Point .measurement(measurement) .addField("foo", 2d) .tag("device", "two") - .time(1485277200000000200L, TimeUnit.NANOSECONDS).build(); // 2017-01-27T17:00:00.000000200Z - String timeP2 = "2017-01-27T17:00:00.000000200Z"; + .time(t2, TimeUnit.NANOSECONDS).build(); // 2017-01-27T17:00:00.000000200Z + Double timeP2 = Double.valueOf(t2); + + long t3 = 1485280800000000300L; Point p3 = Point .measurement(measurement) .addField("foo", 3d) .tag("device", "three") - .time(1485280800000000300L, TimeUnit.NANOSECONDS).build(); // 2017-01-27T18:00:00.000000300Z - String timeP3 = "2017-01-27T18:00:00.000000300Z"; + .time(t3, TimeUnit.NANOSECONDS).build(); // 2017-01-27T18:00:00.000000300Z + Double timeP3 = Double.valueOf(t3); BatchPoints batchPoints = BatchPoints .database(dbName) @@ -460,7 +476,7 @@ public void testWriteBatchWithoutPrecision() throws Exception { this.influxDB.write(batchPoints); // THEN the measure points have a timestamp with second precision - QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName), TimeUnit.NANOSECONDS); assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().size()).isEqualTo(3); assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0)).isEqualTo(timeP1); assertThat(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0)).isEqualTo(timeP2); @@ -491,7 +507,7 @@ public void testWriteRecordsWithPrecision() throws Exception { String timeP3 = TimeUtil.toInfluxDBTimeFormat(1485280800000L); // WHEN I write the batch - 
this.influxDB.write(dbName, rp, null, TimeUnit.SECONDS, records); + this.influxDB.write(dbName, rp, InfluxDB.ConsistencyLevel.ALL, TimeUnit.SECONDS, records); // THEN the measure points have a timestamp with second precision QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); @@ -515,7 +531,7 @@ public void testCreateNumericNamedDatabase() { Assert.assertTrue(result.contains(numericDbName)); this.influxDB.deleteDatabase(numericDbName); } - + /** * Test that creating database which name is empty will throw expected exception */ @@ -553,7 +569,7 @@ public void testIsBatchEnabled() { this.influxDB.disableBatch(); Assert.assertFalse(this.influxDB.isBatchEnabled()); } - + /** * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. */ @@ -561,7 +577,7 @@ public void testIsBatchEnabled() { public void testBatchEnabledWithThreadFactory() { final String threadName = "async_influxdb_write"; this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, new ThreadFactory() { - + @Override public Thread newThread(Runnable r) { Thread thread = new Thread(r); @@ -576,7 +592,7 @@ public Thread newThread(Runnable r) { existThreadWithSettedName = true; break; } - + } Assert.assertTrue(existThreadWithSettedName); this.influxDB.disableBatch(); @@ -586,7 +602,7 @@ public Thread newThread(Runnable r) { public void testBatchEnabledWithThreadFactoryIsNull() { this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, null); } - + /** * Test the implementation of {@link InfluxDBImpl#InfluxDBImpl(String, String, String, okhttp3.OkHttpClient.Builder)}. 
*/ From 1b26692690c7f8f080c2a8c4f316049053d867a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Marty?= Date: Tue, 23 May 2017 17:19:15 +0200 Subject: [PATCH 006/745] influxdb-java-182 Allow write precision of TimeUnit other than Nanoseconds tests failing fixes --- src/test/java/org/influxdb/InfluxDBTest.java | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 9ef6ae188..00fd32bf9 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -496,18 +496,21 @@ public void testWriteRecordsWithPrecision() throws Exception { String measurement = TestUtils.getRandomMeasurement(); // GIVEN a set of records using second precision + DateTimeFormatter formatter = DateTimeFormatter + .ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'") + .withZone(ZoneId.of("UTC")); List records = Lists.newArrayList(); - records.add("cpu,atag=test1 idle=100,usertime=10,system=1 1485273600"); - String timeP1 = TimeUtil.toInfluxDBTimeFormat(1485273600000L); + records.add(measurement + ",atag=test1 idle=100,usertime=10,system=1 1485273600"); + String timeP1 = formatter.format(Instant.ofEpochSecond(1485273600)); - records.add("cpu,atag=test2 idle=200,usertime=20,system=2 1485277200"); - String timeP2 = TimeUtil.toInfluxDBTimeFormat(1485277200000L); + records.add(measurement + ",atag=test2 idle=200,usertime=20,system=2 1485277200"); + String timeP2 = formatter.format(Instant.ofEpochSecond(1485277200)); - records.add("cpu,atag=test3 idle=300,usertime=30,system=3 1485280800"); - String timeP3 = TimeUtil.toInfluxDBTimeFormat(1485280800000L); + records.add(measurement + ",atag=test3 idle=300,usertime=30,system=3 1485280800"); + String timeP3 = formatter.format(Instant.ofEpochSecond(1485280800)); // WHEN I write the batch - this.influxDB.write(dbName, rp, InfluxDB.ConsistencyLevel.ALL, TimeUnit.SECONDS, records); + 
this.influxDB.write(dbName, rp, InfluxDB.ConsistencyLevel.ONE, TimeUnit.SECONDS, records); // THEN the measure points have a timestamp with second precision QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); From 4961f31d103bc3f152dee72c43cca9e7bf93d032 Mon Sep 17 00:00:00 2001 From: Simon Legner Date: Thu, 25 May 2017 09:57:39 +0200 Subject: [PATCH 007/745] Replace RuntimeException with InfluxDBException Catched IOExceptions are wrapped in InfluxDBIOException. --- .../java/org/influxdb/InfluxDBException.java | 21 ++++++++++++++++++ .../org/influxdb/InfluxDBIOException.java | 15 +++++++++++++ src/main/java/org/influxdb/dto/Query.java | 2 +- .../java/org/influxdb/impl/InfluxDBImpl.java | 22 ++++++++++--------- 4 files changed, 49 insertions(+), 11 deletions(-) create mode 100644 src/main/java/org/influxdb/InfluxDBException.java create mode 100644 src/main/java/org/influxdb/InfluxDBIOException.java diff --git a/src/main/java/org/influxdb/InfluxDBException.java b/src/main/java/org/influxdb/InfluxDBException.java new file mode 100644 index 000000000..69f28e0b3 --- /dev/null +++ b/src/main/java/org/influxdb/InfluxDBException.java @@ -0,0 +1,21 @@ +package org.influxdb; + +/** + * A wrapper for various exceptions caused while interacting with InfluxDB. 
+ * + * @author Simon Legner + */ +public class InfluxDBException extends RuntimeException { + + public InfluxDBException(final String message) { + super(message); + } + + public InfluxDBException(final String message, final Throwable cause) { + super(message, cause); + } + + public InfluxDBException(final Throwable cause) { + super(cause); + } +} diff --git a/src/main/java/org/influxdb/InfluxDBIOException.java b/src/main/java/org/influxdb/InfluxDBIOException.java new file mode 100644 index 000000000..0a6858c76 --- /dev/null +++ b/src/main/java/org/influxdb/InfluxDBIOException.java @@ -0,0 +1,15 @@ +package org.influxdb; + +import java.io.IOException; + +/** + * A wrapper for {@link IOException} caused while interacting with InfluxDB. + * + * @author Simon Legner + */ +public class InfluxDBIOException extends InfluxDBException { + + public InfluxDBIOException(final IOException cause) { + super(cause); + } +} diff --git a/src/main/java/org/influxdb/dto/Query.java b/src/main/java/org/influxdb/dto/Query.java index 6305c5942..cab423bff 100644 --- a/src/main/java/org/influxdb/dto/Query.java +++ b/src/main/java/org/influxdb/dto/Query.java @@ -105,7 +105,7 @@ public static String encode(final String command) { try { return URLEncoder.encode(command, StandardCharsets.UTF_8.name()); } catch (UnsupportedEncodingException e) { - throw new RuntimeException(e); + throw new IllegalStateException("Every JRE must support UTF-8", e); } } } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 884787cab..c33a59214 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -10,6 +10,8 @@ import com.squareup.moshi.Moshi; import org.influxdb.InfluxDB; +import org.influxdb.InfluxDBException; +import org.influxdb.InfluxDBIOException; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; import org.influxdb.dto.Pong; @@ -99,7 +101,7 @@ private 
InetAddress parseHostAddress(final String url) { try { return InetAddress.getByName(HttpUrl.parse(url).host()); } catch (UnknownHostException e) { - throw new RuntimeException(e); + throw new InfluxDBIOException(e); } } @@ -221,7 +223,7 @@ public Pong ping() { pong.setResponseTime(watch.elapsed(TimeUnit.MILLISECONDS)); return pong; } catch (IOException e) { - throw new RuntimeException(e); + throw new InfluxDBIOException(e); } } @@ -304,7 +306,7 @@ public void write(final int udpPort, final String records) { try { datagramSocket.send(new DatagramPacket(bytes, bytes.length, hostAddress, udpPort)); } catch (IOException e) { - throw new RuntimeException(e); + throw new InfluxDBIOException(e); } } @@ -315,7 +317,7 @@ private void initialDatagramSocket() { try { datagramSocket = new DatagramSocket(); } catch (SocketException e) { - throw new RuntimeException(e); + throw new InfluxDBIOException(e); } } } @@ -354,7 +356,7 @@ public QueryResult query(final Query query) { public void query(final Query query, final int chunkSize, final Consumer consumer) { if (version().startsWith("0.") || version().startsWith("1.0")) { - throw new RuntimeException("chunking not supported"); + throw new UnsupportedOperationException("chunking not supported"); } Call call = this.influxDBService.query(this.username, this.password, @@ -374,20 +376,20 @@ public void onResponse(final Call call, final Response call, final Throwable t) { - throw new RuntimeException(t); + throw new InfluxDBException(t); } }); } @@ -463,10 +465,10 @@ private T execute(final Call call) { return response.body(); } try (ResponseBody errorBody = response.errorBody()) { - throw new RuntimeException(errorBody.string()); + throw new InfluxDBException(errorBody.string()); } } catch (IOException e) { - throw new RuntimeException(e); + throw new InfluxDBIOException(e); } } From 3cf468e752599b2419981d3352021c6468d35653 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Marty?= Date: Thu, 25 May 2017 11:19:22 +0200 Subject: 
[PATCH 008/745] revert automatic import reorganize --- src/main/java/org/influxdb/dto/BatchPoints.java | 15 ++++++++------- src/main/java/org/influxdb/dto/Point.java | 10 +++++----- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/influxdb/dto/BatchPoints.java b/src/main/java/org/influxdb/dto/BatchPoints.java index c1bee4383..5a04f4cf2 100644 --- a/src/main/java/org/influxdb/dto/BatchPoints.java +++ b/src/main/java/org/influxdb/dto/BatchPoints.java @@ -1,18 +1,19 @@ package org.influxdb.dto; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Ordering; -import org.influxdb.InfluxDB.ConsistencyLevel; - import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; +import org.influxdb.InfluxDB.ConsistencyLevel; + +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Ordering; + /** * {Purpose of This Type}. 
* diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index 647dbdbd4..cc7d0c134 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -1,10 +1,5 @@ package org.influxdb.dto; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; -import com.google.common.escape.Escaper; -import com.google.common.escape.Escapers; - import java.math.BigDecimal; import java.math.BigInteger; import java.text.NumberFormat; @@ -15,6 +10,11 @@ import java.util.TreeMap; import java.util.concurrent.TimeUnit; +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; +import com.google.common.escape.Escaper; +import com.google.common.escape.Escapers; + /** * Representation of a InfluxDB database Point. * From 69801af87e9b32ec43ab3d1f3d2b8532ff08cdbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Marty?= Date: Thu, 25 May 2017 11:21:55 +0200 Subject: [PATCH 009/745] revert automatic indent --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 247adf543..fec81a4f9 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -279,13 +279,13 @@ public void write(final BatchPoints batchPoints) { public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final TimeUnit precision, final String records) { execute(this.influxDBService.writePoints( - this.username, - this.password, - database, - retentionPolicy, - TimeUtil.toTimePrecision(precision), - consistency.value(), - RequestBody.create(MEDIA_TYPE_STRING, records))); + this.username, + this.password, + database, + retentionPolicy, + TimeUtil.toTimePrecision(precision), + consistency.value(), 
+ RequestBody.create(MEDIA_TYPE_STRING, records))); } @Override From 8bec5b80d5bc6c471c5bee0e13e39949c7494107 Mon Sep 17 00:00:00 2001 From: Andy Feller Date: Tue, 30 May 2017 11:34:23 -0400 Subject: [PATCH 010/745] Simplify the write methods, ignore maven directory - Added optional state to InfluxDB interface and implementation to provide database, retention policy, and consistency - Added new write() methods based on non-batch point calls to remove need to specify database, retention policy, and consistency - Added tests around new write() methods - Added .m2 directory to git ignore --- .gitignore | 3 +- src/main/java/org/influxdb/InfluxDB.java | 48 ++++++++++++++ .../java/org/influxdb/impl/InfluxDBImpl.java | 48 ++++++++++++++ src/test/java/org/influxdb/InfluxDBTest.java | 65 +++++++++++++++++++ 4 files changed, 163 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index d2ada48e7..fed44b33d 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,5 @@ target/ .settings/ test-output/ .idea/ -*iml \ No newline at end of file +*iml +.m2/ diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index 19cd6b956..0db309e46 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -113,6 +113,28 @@ public String value() { */ public String version(); + /** + * Write a single Point to the default database. + * + * @param point + * The point to write + */ + public void write(final Point point); + + /** + * Write a set of Points to the default database with the string records. + * + * @param records + */ + public void write(final String records); + + /** + * Write a set of Points to the default database with the list of string records. + * + * @param records + */ + public void write(final List records); + /** * Write a single Point to the database. 
* @@ -196,4 +218,30 @@ public String value() { */ public List describeDatabases(); + /** + * Set the consistency level which is used for writing points. + * + * @param consistency + * the consistency level to set. + * @return the InfluxDB instance to be able to use it in a fluent manner. + */ + public InfluxDB setConsistency(final ConsistencyLevel consistency); + + /** + * Set the database which is used for writing points. + * + * @param database + * the database to set. + * @return the InfluxDB instance to be able to use it in a fluent manner. + */ + public InfluxDB setDatabase(final String database); + + /** + * Set the retention policy which is used for writing points. + * + * @param retentionPolicy + * the retention policy to set. + * @return the InfluxDB instance to be able to use it in a fluent manner. + */ + public InfluxDB setRetentionPolicy(final String retentionPolicy); } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index f17d064ea..0f5db96ee 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -50,6 +50,9 @@ public class InfluxDBImpl implements InfluxDB { private final AtomicLong batchedCount = new AtomicLong(); private final HttpLoggingInterceptor loggingInterceptor; private LogLevel logLevel = LogLevel.NONE; + private String database; + private String retentionPolicy = "autogen"; + private ConsistencyLevel consistency = ConsistencyLevel.ONE; public InfluxDBImpl(final String url, final String username, final String password, final OkHttpClient.Builder client) { @@ -66,6 +69,15 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.influxDBService = this.retrofit.create(InfluxDBService.class); } + public InfluxDBImpl(final String url, final String username, final String password, + final OkHttpClient.Builder client, final String database, final String retentionPolicy, final 
ConsistencyLevel consistency) { + this(url, username, password, client); + + setConsistency(consistency); + setDatabase(database); + setRetentionPolicy(retentionPolicy); + } + @Override public InfluxDB setLogLevel(final LogLevel logLevel) { switch (logLevel) { @@ -146,6 +158,21 @@ public String version() { return ping().getVersion(); } + @Override + public void write(Point point) { + write(database, retentionPolicy, point); + } + + @Override + public void write(String records) { + write(database, retentionPolicy, consistency, records); + } + + @Override + public void write(List records) { + write(database, retentionPolicy, consistency, records); + } + @Override public void write(final String database, final String retentionPolicy, final Point point) { if (this.batchEnabled.get()) { @@ -256,6 +283,27 @@ public List describeDatabases() { return databases; } + @Override + public InfluxDB setConsistency(ConsistencyLevel consistency) { + + this.consistency = consistency; + return this; + } + + @Override + public InfluxDB setDatabase(String database) { + + this.database = database; + return this; + } + + @Override + public InfluxDB setRetentionPolicy(String retentionPolicy) { + + this.retentionPolicy = retentionPolicy; + return this; + } + private T execute(Call call) { try { Response response = call.execute(); diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 6215ea15a..0971b2ab6 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -147,6 +147,23 @@ public void testWriteStringData() { this.influxDB.deleteDatabase(dbName); } + /** + * Test writing to the database using string protocol with simpler interface. 
+ */ + @Test + public void testWriteStringDataSimple() { + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + this.influxDB.setDatabase(dbName); + this.influxDB.setRetentionPolicy(rp); + this.influxDB.write("cpu,atag=test idle=90,usertime=9,system=1"); + Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); + QueryResult result = this.influxDB.query(query); + Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + this.influxDB.deleteDatabase(dbName); + } + /** * Test writing multiple records to the database using string protocol. */ @@ -167,6 +184,28 @@ public void testWriteMultipleStringData() { this.influxDB.deleteDatabase(dbName); } + /** + * Test writing multiple records to the database using string protocol with simpler interface. + */ + @Test + public void testWriteMultipleStringDataSimple() { + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + this.influxDB.setDatabase(dbName); + this.influxDB.setRetentionPolicy(rp); + + this.influxDB.write("cpu,atag=test1 idle=100,usertime=10,system=1\ncpu,atag=test2 idle=200,usertime=20,system=2\ncpu,atag=test3 idle=300,usertime=30,system=3"); + Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); + QueryResult result = this.influxDB.query(query); + + Assert.assertEquals(result.getResults().get(0).getSeries().size(), 3); + Assert.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); + Assert.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); + Assert.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + this.influxDB.deleteDatabase(dbName); + } + /** * Test writing multiple separate records to the 
database using string protocol. */ @@ -191,6 +230,32 @@ public void testWriteMultipleStringDataLines() { this.influxDB.deleteDatabase(dbName); } + /** + * Test writing multiple separate records to the database using string protocol with simpler interface. + */ + @Test + public void testWriteMultipleStringDataLinesSimple() { + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + this.influxDB.setDatabase(dbName); + this.influxDB.setRetentionPolicy(rp); + + this.influxDB.write(Arrays.asList( + "cpu,atag=test1 idle=100,usertime=10,system=1", + "cpu,atag=test2 idle=200,usertime=20,system=2", + "cpu,atag=test3 idle=300,usertime=30,system=3" + )); + Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); + QueryResult result = this.influxDB.query(query); + + Assert.assertEquals(result.getResults().get(0).getSeries().size(), 3); + Assert.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); + Assert.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); + Assert.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + this.influxDB.deleteDatabase(dbName); + } + /** * Test that creating database which name is composed of numbers only works */ From 165d8592b5f7f272056f7967866a084d4b47a79d Mon Sep 17 00:00:00 2001 From: Andy Feller Date: Tue, 30 May 2017 11:44:17 -0400 Subject: [PATCH 011/745] updated readme to highlight new simpler write() methods --- README.md | 32 +++++++++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 9d8d25fd5..c698fa1f2 100644 --- a/README.md +++ b/README.md @@ -72,6 +72,36 @@ influxDB.query(query); influxDB.deleteDatabase(dbName); ``` +If all of your points are written to the same database and retention policy, the simpler write() methods can 
be used. + +```java +InfluxDB influxDB = InfluxDBFactory.connect("http://172.17.0.2:8086", "root", "root"); +String dbName = "aTimeSeries"; +influxDB.createDatabase(dbName); +influxDB.setDatabase(dbName); +influxDB.setRetentionPolicy("autogen"); + +// Flush every 2000 Points, at least every 100ms +influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS); + +influxDB.write(Point.measurement("cpu") + .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) + .addField("idle", 90L) + .addField("user", 9L) + .addField("system", 1L) + .build()); + +influxDB.write(Point.measurement("disk") + .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) + .addField("used", 80L) + .addField("free", 1L) + .build()); + +Query query = new Query("SELECT idle FROM cpu", dbName); +influxDB.query(query); +influxDB.deleteDatabase(dbName); +``` + ### Changes in 2.4 influxdb-java now uses okhttp3 and retrofit2. As a result, you can now pass an ``OkHttpClient.Builder`` to the ``InfluxDBFactory.connect`` if you wish to add more interceptors, etc, to OkHttp. @@ -87,7 +117,7 @@ that allow this to be specified (default is still GET). org.influxdb influxdb-java - 2.3 + 2.4 ``` From 1e45e3c74e5189e374c9be6008d4c74cc6c444e9 Mon Sep 17 00:00:00 2001 From: Doug Melton Date: Wed, 31 May 2017 00:43:11 -0700 Subject: [PATCH 012/745] Added gradle usage --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 94e1eb2e2..730d27360 100644 --- a/README.md +++ b/README.md @@ -128,6 +128,10 @@ The latest version for maven dependence: 2.5 ``` +Or when using with gradle: +```groovy +compile 'org.influxdb:influxdb-java:2.5' +``` For version change history have a look at [ChangeLog](https://github.com/influxdata/influxdb-java/blob/master/CHANGELOG.md). From d27c4dbb139f1cd4eddbb5e988e30464eef20703 Mon Sep 17 00:00:00 2001 From: Brett Wooldridge Date: Thu, 1 Jun 2017 18:22:02 +0900 Subject: [PATCH 013/745] Simple but significant performance improvements. 
--- src/main/java/org/influxdb/dto/Point.java | 84 +++++++++---------- .../java/org/influxdb/impl/InfluxDBImpl.java | 22 ++--- 2 files changed, 50 insertions(+), 56 deletions(-) diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index d1dd78a7c..a2af3bf0c 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -37,7 +37,16 @@ public class Point { .addEscape(',', "\\,") .addEscape('=', "\\=") .build(); + private static final int MAX_FRACTION_DIGITS = 340; + private static final ThreadLocal NUMBER_FORMATTER = + ThreadLocal.withInitial(() -> { + NumberFormat numberFormat = NumberFormat.getInstance(Locale.ENGLISH); + numberFormat.setMaximumFractionDigits(MAX_FRACTION_DIGITS); + numberFormat.setGroupingUsed(false); + numberFormat.setMinimumFractionDigits(1); + return numberFormat; + }); Point() { } @@ -121,20 +130,15 @@ public Builder field(final String field, Object value) { if (value instanceof Number) { if (value instanceof Byte) { value = ((Byte) value).doubleValue(); - } - if (value instanceof Short) { + } else if (value instanceof Short) { value = ((Short) value).doubleValue(); - } - if (value instanceof Integer) { + } else if (value instanceof Integer) { value = ((Integer) value).doubleValue(); - } - if (value instanceof Long) { + } else if (value instanceof Long) { value = ((Long) value).doubleValue(); - } - if (value instanceof BigInteger) { + } else if (value instanceof BigInteger) { value = ((BigInteger) value).doubleValue(); } - } fields.put(field, value); return this; @@ -320,69 +324,59 @@ public String toString() { * @return the String without newLine. 
*/ public String lineProtocol() { - final StringBuilder sb = new StringBuilder(); + final StringBuilder sb = new StringBuilder(128); sb.append(KEY_ESCAPER.escape(this.measurement)); - sb.append(concatenatedTags()); - sb.append(concatenateFields()); - sb.append(formatedTime()); + + concatenatedTags(sb); + concatenatedFields(sb); + formatedTime(sb); + return sb.toString(); } - private StringBuilder concatenatedTags() { - final StringBuilder sb = new StringBuilder(); + private void concatenatedTags(final StringBuilder sb) { for (Entry tag : this.tags.entrySet()) { - sb.append(",") + sb.append(',') .append(KEY_ESCAPER.escape(tag.getKey())) - .append("=") + .append('=') .append(KEY_ESCAPER.escape(tag.getValue())); } - sb.append(" "); - return sb; + sb.append(' '); } - private StringBuilder concatenateFields() { - final StringBuilder sb = new StringBuilder(); - final int fieldCount = this.fields.size(); - int loops = 0; - - NumberFormat numberFormat = NumberFormat.getInstance(Locale.ENGLISH); - numberFormat.setMaximumFractionDigits(MAX_FRACTION_DIGITS); - numberFormat.setGroupingUsed(false); - numberFormat.setMinimumFractionDigits(1); - + private void concatenatedFields(final StringBuilder sb) { for (Entry field : this.fields.entrySet()) { - loops++; Object value = field.getValue(); if (value == null) { continue; } - sb.append(KEY_ESCAPER.escape(field.getKey())).append("="); - if (value instanceof String) { - String stringValue = (String) value; - sb.append("\"").append(FIELD_ESCAPER.escape(stringValue)).append("\""); - } else if (value instanceof Number) { + sb.append(KEY_ESCAPER.escape(field.getKey())).append('='); + if (value instanceof Number) { if (value instanceof Double || value instanceof Float || value instanceof BigDecimal) { - sb.append(numberFormat.format(value)); + sb.append(NUMBER_FORMATTER.get().format(value)); } else { - sb.append(value).append("i"); + sb.append(value).append('i'); } + } else if (value instanceof String) { + String stringValue = (String) 
value; + sb.append('"').append(FIELD_ESCAPER.escape(stringValue)).append('"'); } else { sb.append(value); } - if (loops < fieldCount) { - sb.append(","); - } + sb.append(','); } - return sb; + // efficiently chop off the trailing comma + int lengthMinusOne = sb.length() - 1; + if (sb.charAt(lengthMinusOne) == ',') { + sb.setLength(lengthMinusOne); + } } - private StringBuilder formatedTime() { - final StringBuilder sb = new StringBuilder(); - sb.append(" ").append(TimeUnit.NANOSECONDS.convert(this.time, this.precision)); - return sb; + private void formatedTime(final StringBuilder sb) { + sb.append(' ').append(TimeUnit.NANOSECONDS.convert(this.time, this.precision)); } } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 884787cab..e41732865 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -46,7 +46,7 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.LongAdder; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -67,9 +67,9 @@ public class InfluxDBImpl implements InfluxDB { private final InfluxDBService influxDBService; private BatchProcessor batchProcessor; private final AtomicBoolean batchEnabled = new AtomicBoolean(false); - private final AtomicLong writeCount = new AtomicLong(); - private final AtomicLong unBatchedCount = new AtomicLong(); - private final AtomicLong batchedCount = new AtomicLong(); + private final LongAdder writeCount = new LongAdder(); + private final LongAdder unBatchedCount = new LongAdder(); + private final LongAdder batchedCount = new LongAdder(); private volatile DatagramSocket datagramSocket; private final HttpLoggingInterceptor loggingInterceptor; private final GzipRequestInterceptor gzipRequestInterceptor; 
@@ -190,8 +190,8 @@ public void disableBatch() { this.batchProcessor.flushAndShutdown(); if (this.logLevel != LogLevel.NONE) { System.out.println( - "total writes:" + this.writeCount.get() - + " unbatched:" + this.unBatchedCount.get() + "total writes:" + this.writeCount + + " unbatched:" + this.unBatchedCount + " batchPoints:" + this.batchedCount); } } @@ -240,9 +240,9 @@ public void write(final String database, final String retentionPolicy, final Poi .retentionPolicy(retentionPolicy).build(); batchPoints.point(point); this.write(batchPoints); - this.unBatchedCount.incrementAndGet(); + this.unBatchedCount.increment(); } - this.writeCount.incrementAndGet(); + this.writeCount.increment(); } /** @@ -255,14 +255,14 @@ public void write(final int udpPort, final Point point) { this.batchProcessor.put(batchEntry); } else { this.write(udpPort, point.lineProtocol()); - this.unBatchedCount.incrementAndGet(); + this.unBatchedCount.increment(); } - this.writeCount.incrementAndGet(); + this.writeCount.increment(); } @Override public void write(final BatchPoints batchPoints) { - this.batchedCount.addAndGet(batchPoints.getPoints().size()); + this.batchedCount.add(batchPoints.getPoints().size()); RequestBody lineProtocol = RequestBody.create(MEDIA_TYPE_STRING, batchPoints.lineProtocol()); execute(this.influxDBService.writePoints( this.username, From 901bf4fc24cd27015dc157dc3beb143b8f3d05d7 Mon Sep 17 00:00:00 2001 From: Brett Wooldridge Date: Fri, 2 Jun 2017 01:56:05 +0900 Subject: [PATCH 014/745] Cache StringBuilders per-measurement in ThreadLocal maps. Much faster and generates > 800MB less garbage in the PerformanceTests.maxWritePointsPerformance() test compared to the master branch. 
--- src/main/java/org/influxdb/dto/Point.java | 24 +++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index a2af3bf0c..2a6251037 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -3,6 +3,7 @@ import java.math.BigDecimal; import java.math.BigInteger; import java.text.NumberFormat; +import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; @@ -48,6 +49,9 @@ public class Point { return numberFormat; }); + private static final ThreadLocal> CACHED_STRINGBUILDERS = + ThreadLocal.withInitial(HashMap::new); + Point() { } @@ -324,8 +328,10 @@ public String toString() { * @return the String without newLine. */ public String lineProtocol() { - final StringBuilder sb = new StringBuilder(128); - sb.append(KEY_ESCAPER.escape(this.measurement)); + final StringBuilder sb = CACHED_STRINGBUILDERS + .get() + .computeIfAbsent(this.measurement, MeasurementStringBuilder::new) + .resetForUse(); concatenatedTags(sb); concatenatedFields(sb); @@ -379,4 +385,18 @@ private void formatedTime(final StringBuilder sb) { sb.append(' ').append(TimeUnit.NANOSECONDS.convert(this.time, this.precision)); } + private static class MeasurementStringBuilder { + private final StringBuilder sb = new StringBuilder(128); + private final int length; + + MeasurementStringBuilder(final String measurement) { + this.sb.append(KEY_ESCAPER.escape(measurement)); + this.length = sb.length(); + } + + StringBuilder resetForUse() { + sb.setLength(length); + return sb; + } + } } From 8279ac8167d947f64dd8430761c65950cdad07b3 Mon Sep 17 00:00:00 2001 From: Miguel Fonseca Date: Fri, 2 Jun 2017 13:05:15 +0100 Subject: [PATCH 015/745] Fix NPE when failing to parse an invalid URL This is required since HttpUrl.parse() returns null on parse failure. 
With this change the Factory now throws an IllegalArgumentException with proper description. --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 8 +++++++- src/test/java/org/influxdb/InfluxDBFactoryTest.java | 5 +++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 884787cab..5a28b38ee 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -96,8 +96,14 @@ public InfluxDBImpl(final String url, final String username, final String passwo } private InetAddress parseHostAddress(final String url) { + HttpUrl httpUrl = HttpUrl.parse(url); + + if (httpUrl == null) { + throw new IllegalArgumentException("Unable to parse url: " + url); + } + try { - return InetAddress.getByName(HttpUrl.parse(url).host()); + return InetAddress.getByName(httpUrl.host()); } catch (UnknownHostException e) { throw new RuntimeException(e); } diff --git a/src/test/java/org/influxdb/InfluxDBFactoryTest.java b/src/test/java/org/influxdb/InfluxDBFactoryTest.java index e2a930aa6..ec1455c1c 100644 --- a/src/test/java/org/influxdb/InfluxDBFactoryTest.java +++ b/src/test/java/org/influxdb/InfluxDBFactoryTest.java @@ -38,4 +38,9 @@ public void testCreateInfluxDBInstanceWithClientAndWithoutUserNameAndPassword() InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), new OkHttpClient.Builder()); verifyInfluxDBInstance(influxDB); } + + @Test(expected = IllegalArgumentException.class) + public void shouldThrowIllegalArgumentWithInvalidUrl() { + InfluxDBFactory.connect("invalidUrl"); + } } From 3183c146695413a9a8691f0abad516c6bcd839ba Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Mon, 12 Jun 2017 17:45:43 +0200 Subject: [PATCH 016/745] release 2.6 --- pom.xml | 71 ++++++++++++++++++- src/main/java/org/influxdb/InfluxDB.java | 12 ++-- 
src/main/java/org/influxdb/impl/TimeUtil.java | 2 +- 3 files changed, 76 insertions(+), 9 deletions(-) diff --git a/pom.xml b/pom.xml index 0aaeaafe7..36952ead7 100644 --- a/pom.xml +++ b/pom.xml @@ -1,16 +1,17 @@ + 4.0.0 org.influxdb influxdb-java jar - 2.5-SNAPSHOT + 2.6 influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -57,6 +58,19 @@ + + + + ossrh + https://oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://oss.sonatype.org/service/local/staging/deploy/maven2/ + + + + @@ -82,6 +96,59 @@ + + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.7 + true + + ossrh + https://oss.sonatype.org/ + true + + + + org.apache.maven.plugins + maven-source-plugin + 2.2.1 + + + attach-sources + + jar-no-fork + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.9.1 + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins + maven-gpg-plugin + 1.5 + + + sign-artifacts + verify + + sign + + + + org.jacoco jacoco-maven-plugin diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index cad445c05..982eabe07 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -104,10 +104,10 @@ public String value() { /** * Enable batching of single Point writes as - * {@link #enableBatch(int, int, TimeUnit, ThreadFactory, BiConsumer, Throwable>)} + * {@link #enableBatch(int, int, TimeUnit, ThreadFactory, BiConsumer)} * using with a exceptionHandler that does nothing. 
* - * @see #enableBatch(int, int, TimeUnit, ThreadFactory, BiConsumer, Throwable>) + * @see #enableBatch(int, int, TimeUnit, ThreadFactory, BiConsumer) */ public InfluxDB enableBatch(final int actions, final int flushDuration, final TimeUnit flushDurationTimeUnit, final ThreadFactory threadFactory); @@ -179,9 +179,9 @@ public InfluxDB enableBatch(final int actions, final int flushDuration, final Ti public void write(final int udpPort, final Point point); /** - * Write a set of Points to the influxdb database with the new (>= 0.9.0rc32) lineprotocol. + * Write a set of Points to the influxdb database with the new (>= 0.9.0rc32) lineprotocol. * - * {@linkplain "https://github.com/influxdb/influxdb/pull/2696"} + * @see 2696 * * @param batchPoints */ @@ -190,7 +190,7 @@ public InfluxDB enableBatch(final int actions, final int flushDuration, final Ti /** * Write a set of Points to the influxdb database with the string records. * - * {@linkplain "https://github.com/influxdb/influxdb/pull/2696"} + * @see 2696 * * @param records */ @@ -200,7 +200,7 @@ public void write(final String database, final String retentionPolicy, /** * Write a set of Points to the influxdb database with the list of string records. * - * {@linkplain "https://github.com/influxdb/influxdb/pull/2696"} + * @see 2696 * * @param records */ diff --git a/src/main/java/org/influxdb/impl/TimeUtil.java b/src/main/java/org/influxdb/impl/TimeUtil.java index 61492eb11..079e50b6a 100644 --- a/src/main/java/org/influxdb/impl/TimeUtil.java +++ b/src/main/java/org/influxdb/impl/TimeUtil.java @@ -69,7 +69,7 @@ public static String toTimePrecision(final TimeUnit t) { /** * convert a unix epoch time to timestamp used by influxdb. 
* this can then be used in query expressions against influxdb's time column like so: - * influxDB.query(new Query("SELECT * FROM some_measurement WHERE time >= '" + * influxDB.query(new Query("SELECT * FROM some_measurement WHERE time >= '" * + toInfluxDBTimeFormat(timeStart) + "'", some_database)) * influxdb time format example: 2016-10-31T06:52:20.020Z * From c9be75a4083514b50224d5856c45aa9a7300cf46 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Mon, 12 Jun 2017 17:56:59 +0200 Subject: [PATCH 017/745] removed the comment we do not need it anymore as push to maven worked --- pom.xml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/pom.xml b/pom.xml index 36952ead7..2859f4012 100644 --- a/pom.xml +++ b/pom.xml @@ -1,12 +1,5 @@ - 4.0.0 org.influxdb influxdb-java From 6df391351434a7e1131173085d72a1d02147e618 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Mon, 12 Jun 2017 19:41:43 +0200 Subject: [PATCH 018/745] removed gpg plugin in an attempt to make the build run succesfully --- pom.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pom.xml b/pom.xml index 2859f4012..1f2ef68f0 100644 --- a/pom.xml +++ b/pom.xml @@ -128,6 +128,7 @@ + org.jacoco jacoco-maven-plugin From 25f9a4a9f34b3cae8a0802738c987e8d10912d67 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Mon, 12 Jun 2017 19:54:09 +0200 Subject: [PATCH 019/745] increase the versions to be up to date, removed gpg plugin checked local build gpg plugin is missing keys that would be publicly shared and used for signing the artifacts --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 1f2ef68f0..ac1721911 100644 --- a/pom.xml +++ b/pom.xml @@ -104,7 +104,7 @@ org.apache.maven.plugins maven-source-plugin - 2.2.1 + 3.0.1 attach-sources @@ -118,7 +118,7 @@ org.apache.maven.plugins maven-javadoc-plugin - 2.9.1 + 2.10.4 attach-javadocs @@ -128,11 +128,11 @@ - From e852a6add778080d6be4f64b05141bb502427d89 Mon Sep 17 00:00:00 2001 From: Stefan Majer 
Date: Fri, 17 Nov 2017 13:26:10 +0100 Subject: [PATCH 076/745] Update maven compile plugin from 3.6.1 -> 3.7.0, maven javadoc plugin from 2.10.4 -> 3.0.0-M1 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 9a1024d9c..2f8f644a5 100644 --- a/pom.xml +++ b/pom.xml @@ -70,7 +70,7 @@ org.apache.maven.plugins maven-compiler-plugin - 3.6.1 + 3.7.0 1.8 1.8 @@ -143,7 +143,7 @@ org.apache.maven.plugins maven-javadoc-plugin - 2.10.4 + 3.0.0-M1 attach-javadocs From 8098e1ef87fcd10661fe0452dd41cd90717e356a Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Mon, 20 Nov 2017 10:42:18 +0100 Subject: [PATCH 077/745] Update okhttp from 3.9.0 -> 3.9.1 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 2f8f644a5..a4e09ce9e 100644 --- a/pom.xml +++ b/pom.xml @@ -248,12 +248,12 @@ com.squareup.okhttp3 okhttp - 3.9.0 + 3.9.1 com.squareup.okhttp3 logging-interceptor - 3.9.0 + 3.9.1 From 619af3480ee2d8c2110d8eab85edbf232f3249a3 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Mon, 20 Nov 2017 18:51:07 +0100 Subject: [PATCH 078/745] Migrate to JUnit5 --- pom.xml | 18 +- .../org/influxdb/InfluxDBFactoryTest.java | 97 ++++---- src/test/java/org/influxdb/InfluxDBTest.java | 210 ++++++++++-------- .../java/org/influxdb/PerformanceTests.java | 31 +-- src/test/java/org/influxdb/TicketTest.java | 12 +- .../java/org/influxdb/dto/BatchPointTest.java | 2 +- src/test/java/org/influxdb/dto/PointTest.java | 35 ++- src/test/java/org/influxdb/dto/QueryTest.java | 2 +- .../org/influxdb/impl/BatchProcessorTest.java | 25 ++- .../influxdb/impl/ChunkingExceptionTest.java | 8 +- .../impl/InfluxDBResultMapperTest.java | 95 ++++---- .../org/influxdb/impl/PreconditionsTest.java | 39 ++-- .../java/org/influxdb/impl/TimeUtilTest.java | 22 +- 13 files changed, 336 insertions(+), 260 deletions(-) diff --git a/pom.xml b/pom.xml index a4e09ce9e..979f63a01 100644 --- a/pom.xml +++ b/pom.xml @@ -210,9 +210,21 @@ 
- junit - junit - 4.12 + org.junit.jupiter + junit-jupiter-engine + 5.0.2 + test + + + org.junit.platform + junit-platform-runner + 1.0.2 + test + + + org.hamcrest + hamcrest-all + 1.3 test diff --git a/src/test/java/org/influxdb/InfluxDBFactoryTest.java b/src/test/java/org/influxdb/InfluxDBFactoryTest.java index ec1455c1c..99291b522 100644 --- a/src/test/java/org/influxdb/InfluxDBFactoryTest.java +++ b/src/test/java/org/influxdb/InfluxDBFactoryTest.java @@ -1,46 +1,51 @@ -package org.influxdb; - -import org.influxdb.dto.Pong; -import org.junit.Assert; -import org.junit.Test; - -import okhttp3.OkHttpClient; - -/** - * Test the InfluxDB Factory API. - * - * @author fujian1115 [at] gmail.com - * - */ -public class InfluxDBFactoryTest { - - /** - * Test for a {@link InfluxDBFactory #connect(String)}. - */ - @Test - public void testCreateInfluxDBInstanceWithoutUserNameAndPassword() { - InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true)); - verifyInfluxDBInstance(influxDB); - } - - private void verifyInfluxDBInstance(InfluxDB influxDB) { - Assert.assertNotNull(influxDB); - Pong pong = influxDB.ping(); - Assert.assertNotNull(pong); - Assert.assertNotEquals(pong.getVersion(), "unknown"); - } - - /** - * Test for a {@link InfluxDBFactory #connect(String, okhttp3.OkHttpClient.Builder)}. 
- */ - @Test - public void testCreateInfluxDBInstanceWithClientAndWithoutUserNameAndPassword() { - InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), new OkHttpClient.Builder()); - verifyInfluxDBInstance(influxDB); - } - - @Test(expected = IllegalArgumentException.class) - public void shouldThrowIllegalArgumentWithInvalidUrl() { - InfluxDBFactory.connect("invalidUrl"); - } -} +package org.influxdb; + +import org.influxdb.dto.Pong; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; + +import okhttp3.OkHttpClient; + +/** + * Test the InfluxDB Factory API. + * + * @author fujian1115 [at] gmail.com + * + */ +@RunWith(JUnitPlatform.class) +public class InfluxDBFactoryTest { + + /** + * Test for a {@link InfluxDBFactory #connect(String)}. + */ + @Test + public void testCreateInfluxDBInstanceWithoutUserNameAndPassword() { + InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true)); + verifyInfluxDBInstance(influxDB); + } + + private void verifyInfluxDBInstance(InfluxDB influxDB) { + Assertions.assertNotNull(influxDB); + Pong pong = influxDB.ping(); + Assertions.assertNotNull(pong); + Assertions.assertNotEquals(pong.getVersion(), "unknown"); + } + + /** + * Test for a {@link InfluxDBFactory #connect(String, okhttp3.OkHttpClient.Builder)}. 
+ */ + @Test + public void testCreateInfluxDBInstanceWithClientAndWithoutUserNameAndPassword() { + InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), new OkHttpClient.Builder()); + verifyInfluxDBInstance(influxDB); + } + + @Test + public void testShouldThrowIllegalArgumentWithInvalidUrl() { + Assertions.assertThrows(IllegalArgumentException.class,() -> { + InfluxDBFactory.connect("invalidUrl"); + }); + } +} diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index d3b973ff7..cd505dd63 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -19,11 +19,12 @@ import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; import org.influxdb.impl.InfluxDBImpl; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; import com.google.common.util.concurrent.Uninterruptibles; @@ -33,20 +34,20 @@ * @author stefan.majer [at] gmail.com * */ +@RunWith(JUnitPlatform.class) public class InfluxDBTest { private InfluxDB influxDB; private final static int UDP_PORT = 8089; private final static String UDP_DATABASE = "udp"; - @Rule public final ExpectedException exception = ExpectedException.none(); /** * Create a influxDB connection before all tests start. 
* * @throws InterruptedException * @throws IOException */ - @Before + @BeforeEach public void setUp() throws InterruptedException, IOException { this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); boolean influxDBstarted = false; @@ -73,8 +74,8 @@ public void setUp() throws InterruptedException, IOException { /** * delete UDP database after all tests end. */ - //@After - public void clearup(){ + @AfterEach + public void cleanup(){ this.influxDB.deleteDatabase(UDP_DATABASE); } @@ -84,8 +85,8 @@ public void clearup(){ @Test public void testPing() { Pong result = this.influxDB.ping(); - Assert.assertNotNull(result); - Assert.assertNotEquals(result.getVersion(), "unknown"); + Assertions.assertNotNull(result); + Assertions.assertNotEquals(result.getVersion(), "unknown"); } /** @@ -94,8 +95,8 @@ public void testPing() { @Test public void testVersion() { String version = this.influxDB.version(); - Assert.assertNotNull(version); - Assert.assertFalse(version.contains("unknown")); + Assertions.assertNotNull(version); + Assertions.assertFalse(version.contains("unknown")); } /** @@ -135,8 +136,8 @@ public void testDescribeDatabases() { this.influxDB.createDatabase(dbName); this.influxDB.describeDatabases(); List result = this.influxDB.describeDatabases(); - Assert.assertNotNull(result); - Assert.assertTrue(result.size() > 0); + Assertions.assertNotNull(result); + Assertions.assertTrue(result.size() > 0); boolean found = false; for (String database : result) { if (database.equals(dbName)) { @@ -145,7 +146,7 @@ public void testDescribeDatabases() { } } - Assert.assertTrue("It is expected that describeDataBases contents the newly create database.", found); + Assertions.assertTrue(found, "It is expected that describeDataBases contents the newly create database."); this.influxDB.deleteDatabase(dbName); } @@ -158,9 +159,9 @@ public void testDatabaseExists() { String notExistentdbName = "unittest_2"; 
this.influxDB.createDatabase(existentdbName); boolean checkDbExistence = this.influxDB.databaseExists(existentdbName); - Assert.assertTrue("It is expected that databaseExists return true for " + existentdbName + " database", checkDbExistence); + Assertions.assertTrue(checkDbExistence, "It is expected that databaseExists return true for " + existentdbName + " database"); checkDbExistence = this.influxDB.databaseExists(notExistentdbName); - Assert.assertFalse("It is expected that databaseExists return false for " + notExistentdbName + " database", checkDbExistence); + Assertions.assertFalse(checkDbExistence, "It is expected that databaseExists return false for " + notExistentdbName + " database"); this.influxDB.deleteDatabase(existentdbName); } @@ -186,7 +187,7 @@ public void testWrite() { this.influxDB.write(batchPoints); Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); QueryResult result = this.influxDB.query(query); - Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); this.influxDB.deleteDatabase(dbName); } @@ -202,7 +203,7 @@ public void testSyncWritePointThroughUDP() { Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); - Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); } /** @@ -212,14 +213,14 @@ public void testSyncWritePointThroughUDP() { public void testAsyncWritePointThroughUDP() { this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); try{ - Assert.assertTrue(this.influxDB.isBatchEnabled()); + Assertions.assertTrue(this.influxDB.isBatchEnabled()); String measurement = TestUtils.getRandomMeasurement(); Point point = 
Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); this.influxDB.write(UDP_PORT, point); Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); - Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); }finally{ this.influxDB.disableBatch(); } @@ -229,15 +230,17 @@ public void testAsyncWritePointThroughUDP() { /** * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. */ - @Test(expected = RuntimeException.class) + @Test public void testAsyncWritePointThroughUDPFail() { this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); try{ - Assert.assertTrue(this.influxDB.isBatchEnabled()); + Assertions.assertTrue(this.influxDB.isBatchEnabled()); String measurement = TestUtils.getRandomMeasurement(); Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); Thread.currentThread().interrupt(); - this.influxDB.write(UDP_PORT, point); + Assertions.assertThrows(RuntimeException.class, () -> { + this.influxDB.write(UDP_PORT, point); + }); }finally{ this.influxDB.disableBatch(); } @@ -254,7 +257,7 @@ public void testWriteStringData() { this.influxDB.write(dbName, rp, InfluxDB.ConsistencyLevel.ONE, "cpu,atag=test idle=90,usertime=9,system=1"); Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); QueryResult result = this.influxDB.query(query); - Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); this.influxDB.deleteDatabase(dbName); } @@ -271,7 +274,7 @@ public void testWriteStringDataSimple() { this.influxDB.write("cpu,atag=test 
idle=90,usertime=9,system=1"); Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); QueryResult result = this.influxDB.query(query); - Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); this.influxDB.deleteDatabase(dbName); } @@ -286,7 +289,7 @@ public void testWriteStringDataThroughUDP() { Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); - Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); } /** @@ -302,10 +305,10 @@ public void testWriteMultipleStringDataThroughUDP() { Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); - Assert.assertEquals(3, result.getResults().get(0).getSeries().size()); - Assert.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); } /** @@ -323,10 +326,10 @@ public void testWriteMultipleStringDataLinesThroughUDP() { Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = 
this.influxDB.query(query); - Assert.assertEquals(3, result.getResults().get(0).getSeries().size()); - Assert.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); } /** @@ -335,8 +338,8 @@ public void testWriteMultipleStringDataLinesThroughUDP() { * The message is larger than the maximum supported by the underlying transport: Datagram send failed * @throws Exception */ - @Test(expected = RuntimeException.class) - public void writeMultipleStringDataLinesOverUDPLimit() throws Exception { + @Test + public void testWriteMultipleStringDataLinesOverUDPLimit() throws Exception { //prepare data List lineProtocols = new ArrayList(); int i = 0; @@ -351,7 +354,9 @@ public void writeMultipleStringDataLinesOverUDPLimit() throws Exception { } } //write batch of string which size is over 64K - this.influxDB.write(UDP_PORT, lineProtocols); + Assertions.assertThrows(RuntimeException.class, () -> { + this.influxDB.write(UDP_PORT, lineProtocols); + }); } /** @@ -367,10 +372,10 @@ public void testWriteMultipleStringData() { Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); QueryResult result = this.influxDB.query(query); - Assert.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assert.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - 
Assert.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); this.influxDB.deleteDatabase(dbName); } @@ -389,10 +394,10 @@ public void testWriteMultipleStringDataSimple() { Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); QueryResult result = this.influxDB.query(query); - Assert.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assert.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); this.influxDB.deleteDatabase(dbName); } @@ -413,10 +418,10 @@ public void testWriteMultipleStringDataLines() { Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); QueryResult result = this.influxDB.query(query); - Assert.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assert.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - 
Assert.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); this.influxDB.deleteDatabase(dbName); } @@ -439,10 +444,10 @@ public void testWriteMultipleStringDataLinesSimple() { Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); QueryResult result = this.influxDB.query(query); - Assert.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assert.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); this.influxDB.deleteDatabase(dbName); } @@ -455,17 +460,19 @@ public void testCreateNumericNamedDatabase() { this.influxDB.createDatabase(numericDbName); List result = this.influxDB.describeDatabases(); - Assert.assertTrue(result.contains(numericDbName)); + Assertions.assertTrue(result.contains(numericDbName)); this.influxDB.deleteDatabase(numericDbName); } /** * Test that creating database which name is 
empty will throw expected exception */ - @Test(expected = IllegalArgumentException.class) + @Test public void testCreateEmptyNamedDatabase() { String emptyName = ""; - this.influxDB.createDatabase(emptyName); + Assertions.assertThrows(IllegalArgumentException.class, () -> { + this.influxDB.createDatabase(emptyName); + }); } /** @@ -477,7 +484,7 @@ public void testCreateDatabaseWithNameContainHyphen() { this.influxDB.createDatabase(databaseName); try { List result = this.influxDB.describeDatabases(); - Assert.assertTrue(result.contains(databaseName)); + Assertions.assertTrue(result.contains(databaseName)); } finally { this.influxDB.deleteDatabase(databaseName); } @@ -488,13 +495,13 @@ public void testCreateDatabaseWithNameContainHyphen() { */ @Test public void testIsBatchEnabled() { - Assert.assertFalse(this.influxDB.isBatchEnabled()); + Assertions.assertFalse(this.influxDB.isBatchEnabled()); this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - Assert.assertTrue(this.influxDB.isBatchEnabled()); + Assertions.assertTrue(this.influxDB.isBatchEnabled()); this.influxDB.disableBatch(); - Assert.assertFalse(this.influxDB.isBatchEnabled()); + Assertions.assertFalse(this.influxDB.isBatchEnabled()); } /** @@ -521,29 +528,35 @@ public Thread newThread(Runnable r) { } } - Assert.assertTrue(existThreadWithSettedName); + Assertions.assertTrue(existThreadWithSettedName); this.influxDB.disableBatch(); } - @Test(expected = NullPointerException.class) + @Test public void testBatchEnabledWithThreadFactoryIsNull() { - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, null); + Assertions.assertThrows(NullPointerException.class, () -> { + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, null); + }); } /** * Test the implementation of {@link InfluxDBImpl#InfluxDBImpl(String, String, String, okhttp3.OkHttpClient.Builder)}. 
*/ - @Test(expected = RuntimeException.class) + @Test public void testWrongHostForInfluxdb(){ String errorHost = "10.224.2.122_error_host"; - InfluxDBFactory.connect("http://" + errorHost + ":" + TestUtils.getInfluxPORT(true)); + Assertions.assertThrows(RuntimeException.class, () -> { + InfluxDBFactory.connect("http://" + errorHost + ":" + TestUtils.getInfluxPORT(true)); + }); } - @Test(expected = IllegalStateException.class) + @Test public void testBatchEnabledTwice() { this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); try{ - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); + Assertions.assertThrows(IllegalStateException.class, () -> { + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); + }); } finally { this.influxDB.disableBatch(); } @@ -556,9 +569,9 @@ public void testBatchEnabledTwice() { public void testCloseInfluxDBClient() { InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - Assert.assertTrue(influxDB.isBatchEnabled()); + Assertions.assertTrue(influxDB.isBatchEnabled()); influxDB.close(); - Assert.assertFalse(influxDB.isBatchEnabled()); + Assertions.assertFalse(influxDB.isBatchEnabled()); } /** @@ -582,10 +595,10 @@ public void testWriteEnableGzip() { Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); QueryResult result = influxDBForTestGzip.query(query); - Assert.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assert.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assert.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); + 
Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); + Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); } finally { influxDBForTestGzip.deleteDatabase(dbName); influxDBForTestGzip.close(); @@ -601,11 +614,11 @@ public void testWriteEnableGzipAndDisableGzip() { InfluxDB influxDBForTestGzip = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); try { //test default: gzip is disable - Assert.assertFalse(influxDBForTestGzip.isGzipEnabled()); + Assertions.assertFalse(influxDBForTestGzip.isGzipEnabled()); influxDBForTestGzip.enableGzip(); - Assert.assertTrue(influxDBForTestGzip.isGzipEnabled()); + Assertions.assertTrue(influxDBForTestGzip.isGzipEnabled()); influxDBForTestGzip.disableGzip(); - Assert.assertFalse(influxDBForTestGzip.isGzipEnabled()); + Assertions.assertFalse(influxDBForTestGzip.isGzipEnabled()); } finally { influxDBForTestGzip.close(); } @@ -646,19 +659,19 @@ public void accept(QueryResult result) { this.influxDB.deleteDatabase(dbName); QueryResult result = queue.poll(20, TimeUnit.SECONDS); - Assert.assertNotNull(result); + Assertions.assertNotNull(result); System.out.println(result); - Assert.assertEquals(2, result.getResults().get(0).getSeries().get(0).getValues().size()); + Assertions.assertEquals(2, result.getResults().get(0).getSeries().get(0).getValues().size()); result = queue.poll(20, TimeUnit.SECONDS); - Assert.assertNotNull(result); + Assertions.assertNotNull(result); System.out.println(result); - Assert.assertEquals(1, result.getResults().get(0).getSeries().get(0).getValues().size()); + Assertions.assertEquals(1, result.getResults().get(0).getSeries().get(0).getValues().size()); result = queue.poll(20, TimeUnit.SECONDS); - Assert.assertNotNull(result); + Assertions.assertNotNull(result); System.out.println(result); - Assert.assertEquals("DONE", result.getError()); + 
Assertions.assertEquals("DONE", result.getError()); } /** @@ -682,7 +695,7 @@ public void accept(QueryResult result) { } }); this.influxDB.deleteDatabase(dbName); - Assert.assertFalse(countDownLatch.await(10, TimeUnit.SECONDS)); + Assertions.assertFalse(countDownLatch.await(10, TimeUnit.SECONDS)); } /** @@ -694,14 +707,15 @@ public void testChunkingOldVersion() throws InterruptedException { if (this.influxDB.version().startsWith("0.") || this.influxDB.version().startsWith("1.0")) { - this.exception.expect(RuntimeException.class); + Assertions.assertThrows(RuntimeException.class, () -> { String dbName = "write_unittest_" + System.currentTimeMillis(); Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); this.influxDB.query(query, 10, new Consumer() { @Override public void accept(QueryResult result) { } - }); + }); + }); } } @@ -721,17 +735,19 @@ public void testFlushPendingWritesWhenBatchingEnabled() { Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", dbName); QueryResult result = this.influxDB.query(query); - Assert.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); } finally { this.influxDB.deleteDatabase(dbName); this.influxDB.disableBatch(); } } - @Test(expected = IllegalStateException.class) + @Test public void testFlushThrowsIfBatchingIsNotEnabled() { - Assert.assertFalse(this.influxDB.isBatchEnabled()); - this.influxDB.flush(); + Assertions.assertFalse(this.influxDB.isBatchEnabled()); + Assertions.assertThrows(IllegalStateException.class, () -> { + this.influxDB.flush(); + }); } /** @@ -748,20 +764,20 @@ public void testCreateDropRetentionPolicies() { Query query = new Query("SHOW RETENTION POLICIES", dbName); QueryResult result = this.influxDB.query(query); - Assert.assertNull(result.getError()); + Assertions.assertNull(result.getError()); List> retentionPolicies = 
result.getResults().get(0).getSeries().get(0).getValues(); - Assert.assertTrue(retentionPolicies.get(1).contains("testRP1")); - Assert.assertTrue(retentionPolicies.get(2).contains("testRP2")); - Assert.assertTrue(retentionPolicies.get(3).contains("testRP3")); + Assertions.assertTrue(retentionPolicies.get(1).contains("testRP1")); + Assertions.assertTrue(retentionPolicies.get(2).contains("testRP2")); + Assertions.assertTrue(retentionPolicies.get(3).contains("testRP3")); this.influxDB.dropRetentionPolicy("testRP1", dbName); this.influxDB.dropRetentionPolicy("testRP2", dbName); this.influxDB.dropRetentionPolicy("testRP3", dbName); result = this.influxDB.query(query); - Assert.assertNull(result.getError()); + Assertions.assertNull(result.getError()); retentionPolicies = result.getResults().get(0).getSeries().get(0).getValues(); - Assert.assertTrue(retentionPolicies.size() == 1); + Assertions.assertTrue(retentionPolicies.size() == 1); } } diff --git a/src/test/java/org/influxdb/PerformanceTests.java b/src/test/java/org/influxdb/PerformanceTests.java index ec783abb8..98827e397 100644 --- a/src/test/java/org/influxdb/PerformanceTests.java +++ b/src/test/java/org/influxdb/PerformanceTests.java @@ -4,16 +4,19 @@ import org.influxdb.InfluxDB.LogLevel; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; +@RunWith(JUnitPlatform.class) public class PerformanceTests { private InfluxDB influxDB; private final static int COUNT = 1; @@ -23,7 +26,7 @@ public class 
PerformanceTests { private final static int UDP_PORT = 8089; private final static String UDP_DATABASE = "udp"; - @Before + @BeforeEach public void setUp() { this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "root", "root"); this.influxDB.setLogLevel(LogLevel.NONE); @@ -33,13 +36,13 @@ public void setUp() { /** * delete UDP database after all tests end. */ - @After - public void clearup(){ + @AfterEach + public void cleanup(){ this.influxDB.deleteDatabase(UDP_DATABASE); } @Test - public void writeSinglePointPerformance() { + public void testWriteSinglePointPerformance() { String dbName = "write_" + System.currentTimeMillis(); this.influxDB.createDatabase(dbName); this.influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS); @@ -57,9 +60,9 @@ public void writeSinglePointPerformance() { this.influxDB.deleteDatabase(dbName); } - @Ignore + @Disabled @Test - public void writePerformance() { + public void testWritePerformance() { String dbName = "writepoints_" + System.currentTimeMillis(); this.influxDB.createDatabase(dbName); String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); @@ -89,7 +92,7 @@ public void writePerformance() { } @Test - public void maxWritePointsPerformance() { + public void testMaxWritePointsPerformance() { String dbName = "d"; this.influxDB.createDatabase(dbName); this.influxDB.enableBatch(100000, 60, TimeUnit.SECONDS); @@ -105,7 +108,7 @@ public void maxWritePointsPerformance() { } @Test - public void writeCompareUDPPerformanceForBatchWithSinglePoints() { + public void testWriteCompareUDPPerformanceForBatchWithSinglePoints() { //prepare data List lineProtocols = new ArrayList(); for (int i = 0; i < 1000; i++) { @@ -128,7 +131,7 @@ public void writeCompareUDPPerformanceForBatchWithSinglePoints() { long elapsedForSingleWrite = watch.elapsed(TimeUnit.MILLISECONDS); System.out.println("performance(ms):write udp with 1000 single strings:" + elapsedForSingleWrite); - 
Assert.assertTrue(elapsedForSingleWrite - elapsedForBatchWrite > 0); + Assertions.assertTrue(elapsedForSingleWrite - elapsedForBatchWrite > 0); } } diff --git a/src/test/java/org/influxdb/TicketTest.java b/src/test/java/org/influxdb/TicketTest.java index cd73b3ac0..828b30a8e 100644 --- a/src/test/java/org/influxdb/TicketTest.java +++ b/src/test/java/org/influxdb/TicketTest.java @@ -9,15 +9,19 @@ import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; import org.influxdb.dto.Pong; -import org.junit.Before; -import org.junit.Test; - +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; /** * Test the InfluxDB API. * * @author stefan.majer [at] gmail.com * */ +@DisplayName("Test for github issues") +@RunWith(JUnitPlatform.class) public class TicketTest { private InfluxDB influxDB; @@ -28,7 +32,7 @@ public class TicketTest { * @throws InterruptedException * @throws IOException */ - @Before + @BeforeEach public void setUp() throws InterruptedException, IOException { this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); boolean influxDBstarted = false; diff --git a/src/test/java/org/influxdb/dto/BatchPointTest.java b/src/test/java/org/influxdb/dto/BatchPointTest.java index 5cef6653c..a86029c34 100644 --- a/src/test/java/org/influxdb/dto/BatchPointTest.java +++ b/src/test/java/org/influxdb/dto/BatchPointTest.java @@ -9,7 +9,7 @@ import java.util.concurrent.TimeUnit; import org.influxdb.InfluxDB; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BatchPointTest { diff --git a/src/test/java/org/influxdb/dto/PointTest.java b/src/test/java/org/influxdb/dto/PointTest.java index 1ea7ae8d4..704a2baaf 100644 --- a/src/test/java/org/influxdb/dto/PointTest.java +++ b/src/test/java/org/influxdb/dto/PointTest.java @@ -11,7 
+11,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Assertions; /** * Test for the Point DTO. @@ -28,7 +29,7 @@ public class PointTest { * */ @Test - public void lineProtocol() { + public void testLineProtocol() { Point point = Point.measurement("test").time(1, TimeUnit.NANOSECONDS).addField("a", 1.0).build(); assertThat(point.lineProtocol()).asString().isEqualTo("test a=1.0 1"); @@ -195,33 +196,43 @@ public void testIgnoreNullPointerValue() { /** * Tests for issue #110 */ - @Test(expected = NullPointerException.class) + @Test public void testAddingTagsWithNullNameThrowsAnError() { - Point.measurement("dontcare").tag(null, "DontCare"); + Assertions.assertThrows(NullPointerException.class, () -> { + Point.measurement("dontcare").tag(null, "DontCare"); + }); } - @Test(expected = NullPointerException.class) + @Test public void testAddingTagsWithNullValueThrowsAnError() { - Point.measurement("dontcare").tag("DontCare", null); + Assertions.assertThrows(NullPointerException.class, () -> { + Point.measurement("dontcare").tag("DontCare", null); + }); } - @Test(expected = NullPointerException.class) + @Test public void testAddingMapOfTagsWithNullNameThrowsAnError() { Map map = new HashMap<>(); map.put(null, "DontCare"); - Point.measurement("dontcare").tag(map); + Assertions.assertThrows(NullPointerException.class, () -> { + Point.measurement("dontcare").tag(map); + }); } - @Test(expected = NullPointerException.class) + @Test public void testAddingMapOfTagsWithNullValueThrowsAnError() { Map map = new HashMap<>(); map.put("DontCare", null); - Point.measurement("dontcare").tag(map); + Assertions.assertThrows(NullPointerException.class, () -> { + Point.measurement("dontcare").tag(map); + }); } - @Test(expected = RuntimeException.class) + @Test public void testNullValueThrowsExceptionViaAddField() { - 
Point.measurement("dontcare").addField("field", (String) null); + Assertions.assertThrows(NullPointerException.class, () -> { + Point.measurement("dontcare").addField("field", (String) null); + }); } @Test diff --git a/src/test/java/org/influxdb/dto/QueryTest.java b/src/test/java/org/influxdb/dto/QueryTest.java index 5231faa9b..40987e1a5 100644 --- a/src/test/java/org/influxdb/dto/QueryTest.java +++ b/src/test/java/org/influxdb/dto/QueryTest.java @@ -2,7 +2,7 @@ import static org.assertj.core.api.Assertions.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; diff --git a/src/test/java/org/influxdb/impl/BatchProcessorTest.java b/src/test/java/org/influxdb/impl/BatchProcessorTest.java index b43a6a322..216c1d7a1 100644 --- a/src/test/java/org/influxdb/impl/BatchProcessorTest.java +++ b/src/test/java/org/influxdb/impl/BatchProcessorTest.java @@ -1,6 +1,5 @@ package org.influxdb.impl; -import static org.hamcrest.CoreMatchers.hasItems; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -8,6 +7,7 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.hamcrest.MockitoHamcrest.argThat; +import org.hamcrest.Matchers; import java.io.IOException; import java.util.concurrent.TimeUnit; @@ -16,7 +16,8 @@ import org.influxdb.InfluxDB; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; public class BatchProcessorTest { @@ -60,7 +61,7 @@ public void testSchedulerExceptionHandlingCallback() throws InterruptedException batchProcessor.put(batchEntry1); Thread.sleep(200); // wait for scheduler - verify(mockHandler, times(1)).accept(argThat(hasItems(point, point)), any(RuntimeException.class)); + verify(mockHandler, 
times(1)).accept(argThat(Matchers.hasItems(point, point)), any(RuntimeException.class)); } @Test @@ -107,24 +108,30 @@ public void testFlushWritesBufferedPointsAndDoesNotShutdownScheduler() throws In verifyNoMoreInteractions(mockInfluxDB); } - @Test(expected = IllegalArgumentException.class) + @Test public void testActionsIsZero() throws InterruptedException, IOException { InfluxDB mockInfluxDB = mock(InfluxDBImpl.class); - BatchProcessor.builder(mockInfluxDB).actions(0) + Assertions.assertThrows(IllegalArgumentException.class, () -> { + BatchProcessor.builder(mockInfluxDB).actions(0) .interval(1, TimeUnit.NANOSECONDS).build(); + }); } - @Test(expected = IllegalArgumentException.class) + @Test public void testIntervalIsZero() throws InterruptedException, IOException { InfluxDB mockInfluxDB = mock(InfluxDBImpl.class); - BatchProcessor.builder(mockInfluxDB).actions(1) + Assertions.assertThrows(IllegalArgumentException.class, () -> { + BatchProcessor.builder(mockInfluxDB).actions(1) .interval(0, TimeUnit.NANOSECONDS).build(); + }); } - @Test(expected = NullPointerException.class) + @Test public void testInfluxDBIsNull() throws InterruptedException, IOException { InfluxDB mockInfluxDB = null; - BatchProcessor.builder(mockInfluxDB).actions(1) + Assertions.assertThrows(NullPointerException.class, () -> { + BatchProcessor.builder(mockInfluxDB).actions(1) .interval(1, TimeUnit.NANOSECONDS).build(); + }); } } diff --git a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java index b8e892fa6..f2865e0cd 100644 --- a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java +++ b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java @@ -18,8 +18,8 @@ import org.influxdb.TestUtils; import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import 
org.mockito.ArgumentCaptor; import com.squareup.moshi.JsonAdapter; @@ -82,8 +82,8 @@ public void accept(QueryResult result) { callback.onResponse(call, Response.success(responseBody)); QueryResult result = queue.poll(20, TimeUnit.SECONDS); - Assert.assertNotNull(result); - Assert.assertEquals(message, result.getError()); + Assertions.assertNotNull(result); + Assertions.assertEquals(message, result.getError()); } } diff --git a/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java b/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java index e31333a2c..c11b2b15a 100644 --- a/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java +++ b/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java @@ -20,9 +20,6 @@ */ package org.influxdb.impl; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - import java.time.Instant; import java.util.Arrays; import java.util.Date; @@ -38,7 +35,8 @@ import org.influxdb.annotation.Measurement; import org.influxdb.dto.QueryResult; import org.influxdb.impl.InfluxDBResultMapper; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; /** * @author fmachado @@ -68,23 +66,27 @@ public void testToPOJO_HappyPath() { List myList = mapper.toPOJO(queryResult, MyCustomMeasurement.class); // Then... 
- assertEquals("there must be one entry in the result list", 1, myList.size()); + Assertions.assertEquals(1, myList.size(), "there must be one entry in the result list"); } - @Test(expected = IllegalArgumentException.class) + @Test public void testThrowExceptionIfMissingAnnotation() { - mapper.throwExceptionIfMissingAnnotation(String.class); + Assertions.assertThrows(IllegalArgumentException.class, () -> { + mapper.throwExceptionIfMissingAnnotation(String.class); + }); } - @Test(expected = InfluxDBMapperException.class) + @Test public void testThrowExceptionIfError_InfluxQueryResultHasError() { QueryResult queryResult = new QueryResult(); queryResult.setError("main queryresult error"); - mapper.throwExceptionIfResultWithError(queryResult); + Assertions.assertThrows(InfluxDBMapperException.class, () -> { + mapper.throwExceptionIfResultWithError(queryResult); + }); } - @Test(expected = InfluxDBMapperException.class) + @Test public void testThrowExceptionIfError_InfluxQueryResultSeriesHasError() { QueryResult queryResult = new QueryResult(); @@ -93,12 +95,14 @@ public void testThrowExceptionIfError_InfluxQueryResultSeriesHasError() { queryResult.setResults(Arrays.asList(seriesResult)); - mapper.throwExceptionIfResultWithError(queryResult); + Assertions.assertThrows(InfluxDBMapperException.class, () -> { + mapper.throwExceptionIfResultWithError(queryResult); + }); } @Test public void testGetMeasurementName_testStateMeasurement() { - assertEquals("CustomMeasurement", mapper.getMeasurementName(MyCustomMeasurement.class)); + Assertions.assertEquals("CustomMeasurement", mapper.getMeasurementName(MyCustomMeasurement.class)); } @Test @@ -120,13 +124,13 @@ public void testParseSeriesAs_testTwoValidSeries() { mapper.parseSeriesAs(series, MyCustomMeasurement.class, result); //Then... 
- assertTrue("there must be two series in the result list", result.size() == 2); + Assertions.assertTrue(result.size() == 2, "there must be two series in the result list"); - assertEquals("Field 'time' (1st series) is not valid", firstSeriesResult.get(0), result.get(0).time.toEpochMilli()); - assertEquals("Field 'uuid' (1st series) is not valid", firstSeriesResult.get(1), result.get(0).uuid); + Assertions.assertEquals(firstSeriesResult.get(0), result.get(0).time.toEpochMilli(), "Field 'time' (1st series) is not valid"); + Assertions.assertEquals(firstSeriesResult.get(1), result.get(0).uuid, "Field 'uuid' (1st series) is not valid"); - assertEquals("Field 'time' (2nd series) is not valid", secondSeriesResult.get(0), result.get(1).time.toEpochMilli()); - assertEquals("Field 'uuid' (2nd series) is not valid", secondSeriesResult.get(1), result.get(1).uuid); + Assertions.assertEquals(secondSeriesResult.get(0), result.get(1).time.toEpochMilli(), "Field 'time' (2nd series) is not valid"); + Assertions.assertEquals(secondSeriesResult.get(1), result.get(1).uuid, "Field 'uuid' (2nd series) is not valid"); } @Test @@ -161,27 +165,32 @@ public void testParseSeriesAs_testNonNullAndValidValues() { //Then... 
MyCustomMeasurement myObject = result.get(0); - assertEquals("field 'time' does not match", now.longValue(), myObject.time.toEpochMilli()); - assertEquals("field 'uuid' does not match", uuidAsString, myObject.uuid); + Assertions.assertEquals(now.longValue(), myObject.time.toEpochMilli(), "field 'time' does not match"); + Assertions.assertEquals(uuidAsString, myObject.uuid, "field 'uuid' does not match"); - assertEquals("field 'doubleObject' does not match", asDouble(seriesResult.get(2)), myObject.doubleObject); - assertEquals("field 'longObject' does not match", new Long(asDouble(seriesResult.get(3)).longValue()), myObject.longObject); - assertEquals("field 'integerObject' does not match", new Integer(asDouble(seriesResult.get(4)).intValue()), myObject.integerObject); + Assertions.assertEquals(asDouble(seriesResult.get(2)), myObject.doubleObject, "field 'doubleObject' does not match"); + Assertions.assertEquals(new Long(asDouble(seriesResult.get(3)).longValue()), myObject.longObject, "field 'longObject' does not match"); + Assertions.assertEquals(new Integer(asDouble(seriesResult.get(4)).intValue()), myObject.integerObject, "field 'integerObject' does not match"); - assertTrue("field 'doublePrimitive' does not match", - Double.compare(asDouble(seriesResult.get(5)).doubleValue(), myObject.doublePrimitive) == 0); + Assertions.assertTrue( + Double.compare(asDouble(seriesResult.get(5)).doubleValue(), myObject.doublePrimitive) == 0, + "field 'doublePrimitive' does not match"); - assertTrue("field 'longPrimitive' does not match", - Long.compare(asDouble(seriesResult.get(6)).longValue(), myObject.longPrimitive) == 0); + Assertions.assertTrue( + Long.compare(asDouble(seriesResult.get(6)).longValue(), myObject.longPrimitive) == 0, + "field 'longPrimitive' does not match"); - assertTrue("field 'integerPrimitive' does not match", - Integer.compare(asDouble(seriesResult.get(7)).intValue(), myObject.integerPrimitive) == 0); + Assertions.assertTrue( + 
Integer.compare(asDouble(seriesResult.get(7)).intValue(), myObject.integerPrimitive) == 0, + "field 'integerPrimitive' does not match"); - assertEquals("field 'booleanObject' does not match", - Boolean.valueOf(String.valueOf(seriesResult.get(8))), myObject.booleanObject); + Assertions.assertEquals( + Boolean.valueOf(String.valueOf(seriesResult.get(8))), myObject.booleanObject, + "field 'booleanObject' does not match"); - assertEquals("field 'booleanPrimitive' does not match", - Boolean.valueOf(String.valueOf(seriesResult.get(9))).booleanValue(), myObject.booleanPrimitive); + Assertions.assertEquals( + Boolean.valueOf(String.valueOf(seriesResult.get(9))).booleanValue(), myObject.booleanPrimitive, + "field 'booleanPrimitive' does not match"); } Double asDouble(Object obj) { @@ -205,10 +214,10 @@ public void testFieldValueModified_DateAsISO8601() { mapper.parseSeriesAs(series, MyCustomMeasurement.class, result); //Then... - assertTrue(result.size() == 1); + Assertions.assertTrue(result.size() == 1); } - @Test(expected = InfluxDBMapperException.class) + @Test public void testUnsupportedField() { // Given... mapper.cacheMeasurementClass(MyPojoWithUnsupportedField.class); @@ -222,7 +231,9 @@ public void testUnsupportedField() { //When... List result = new LinkedList<>(); - mapper.parseSeriesAs(series, MyPojoWithUnsupportedField.class, result); + Assertions.assertThrows(InfluxDBMapperException.class, () -> { + mapper.parseSeriesAs(series, MyPojoWithUnsupportedField.class, result); + }); } /** @@ -243,7 +254,7 @@ public void testToPOJO_SeriesFromQueryResultIsNull() { List myList = mapper.toPOJO(queryResult, MyCustomMeasurement.class); // Then... - assertTrue("there must NO entry in the result list", myList.isEmpty()); + Assertions.assertTrue( myList.isEmpty(), "there must NO entry in the result list"); } @Test @@ -292,12 +303,12 @@ public void testToPOJO_QueryResultCreatedByGroupByClause() { // Then... 
GroupByCarrierDeviceOS firstGroupByEntry = myList.get(0); - assertEquals("field 'carrier' does not match", "000/00", firstGroupByEntry.carrier); - assertEquals("field 'deviceOsVersion' does not match", "4.4.2", firstGroupByEntry.deviceOsVersion); + Assertions.assertEquals("000/00", firstGroupByEntry.carrier, "field 'carrier' does not match"); + Assertions.assertEquals("4.4.2", firstGroupByEntry.deviceOsVersion, "field 'deviceOsVersion' does not match"); GroupByCarrierDeviceOS secondGroupByEntry = myList.get(1); - assertEquals("field 'carrier' does not match", "000/01", secondGroupByEntry.carrier); - assertEquals("field 'deviceOsVersion' does not match", "9.3.5", secondGroupByEntry.deviceOsVersion); + Assertions.assertEquals("000/01", secondGroupByEntry.carrier, "field 'carrier' does not match"); + Assertions.assertEquals("9.3.5", secondGroupByEntry.deviceOsVersion, "field 'deviceOsVersion' does not match"); } @Test @@ -317,8 +328,8 @@ public void testToPOJO_ticket363() { mapper.parseSeriesAs(series, MyCustomMeasurement.class, result); // Then... 
- assertEquals("incorrect number of elemets", 1, result.size()); - assertEquals("incorrect value for the nanoseconds field", 1, result.get(0).time.getNano()); + Assertions.assertEquals(1, result.size(), "incorrect number of elemets"); + Assertions.assertEquals(1, result.get(0).time.getNano(), "incorrect value for the nanoseconds field"); } @Measurement(name = "CustomMeasurement") diff --git a/src/test/java/org/influxdb/impl/PreconditionsTest.java b/src/test/java/org/influxdb/impl/PreconditionsTest.java index 774d76893..43a65f618 100644 --- a/src/test/java/org/influxdb/impl/PreconditionsTest.java +++ b/src/test/java/org/influxdb/impl/PreconditionsTest.java @@ -1,43 +1,52 @@ package org.influxdb.impl; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Assertions; public class PreconditionsTest { @Test - public void checkNonEmptyString1() { + public void testCheckNonEmptyString1() { final String string = "foo"; Preconditions.checkNonEmptyString(string, "string"); } - @Test(expected = IllegalArgumentException.class) - public void checkNonEmptyString2() { + @Test + public void testCheckNonEmptyString2() { final String string = ""; - Preconditions.checkNonEmptyString(string, "string"); + Assertions.assertThrows(IllegalArgumentException.class, () -> { + Preconditions.checkNonEmptyString(string, "string"); + }); } - @Test(expected = IllegalArgumentException.class) - public void checkNonEmptyString3() { + @Test + public void testCheckNonEmptyString3() { final String string = null; - Preconditions.checkNonEmptyString(string, "string"); + Assertions.assertThrows(IllegalArgumentException.class, () -> { + Preconditions.checkNonEmptyString(string, "string"); + }); } @Test - public void checkPositiveNumber1() { + public void testCheckPositiveNumber1() { final Number number = 42; Preconditions.checkPositiveNumber(number, "number"); } - @Test(expected = IllegalArgumentException.class) - public void checkPositiveNumber2() { + @Test + public 
void testCheckPositiveNumber2() { final Number number = 0; - Preconditions.checkPositiveNumber(number, "number"); + Assertions.assertThrows(IllegalArgumentException.class, () -> { + Preconditions.checkPositiveNumber(number, "number"); + }); } - @Test(expected = IllegalArgumentException.class) - public void checkPositiveNumber3() { + @Test + public void testCheckPositiveNumber3() { final Number number = null; - Preconditions.checkPositiveNumber(number, "number"); + Assertions.assertThrows(IllegalArgumentException.class, () -> { + Preconditions.checkPositiveNumber(number, "number"); + }); } } \ No newline at end of file diff --git a/src/test/java/org/influxdb/impl/TimeUtilTest.java b/src/test/java/org/influxdb/impl/TimeUtilTest.java index bb8d8e7b8..a41577c9f 100644 --- a/src/test/java/org/influxdb/impl/TimeUtilTest.java +++ b/src/test/java/org/influxdb/impl/TimeUtilTest.java @@ -1,23 +1,21 @@ package org.influxdb.impl; -import org.junit.Test; +import static org.assertj.core.api.Assertions.assertThat; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; +import org.junit.jupiter.api.Test; public class TimeUtilTest { @Test - public void toInfluxDBTimeFormatTest() throws Exception { - assertThat(TimeUtil.toInfluxDBTimeFormat(1477896740020L), is(equalTo("2016-10-31T06:52:20.020Z"))); - assertThat(TimeUtil.toInfluxDBTimeFormat(1477932740005L), is(equalTo("2016-10-31T16:52:20.005Z"))); + public void testToInfluxDBTimeFormatTest() throws Exception { + assertThat(TimeUtil.toInfluxDBTimeFormat(1477896740020L)).isEqualTo("2016-10-31T06:52:20.020Z"); + assertThat(TimeUtil.toInfluxDBTimeFormat(1477932740005L)).isEqualTo("2016-10-31T16:52:20.005Z"); } @Test - public void fromInfluxDBTimeFormatTest() throws Exception { - assertThat(TimeUtil.fromInfluxDBTimeFormat("2016-10-31T06:52:20.020Z"), is(equalTo(1477896740020L))); - assertThat(TimeUtil.fromInfluxDBTimeFormat("2016-10-31T16:52:20.005Z"), 
is(equalTo(1477932740005L))); - assertThat(TimeUtil.fromInfluxDBTimeFormat("2016-10-31T16:52:20Z"), is(equalTo(1477932740000L))); - assertThat(TimeUtil.fromInfluxDBTimeFormat("2016-10-31T06:52:20Z"), is(equalTo(1477896740000L))); + public void testFromInfluxDBTimeFormatTest() throws Exception { + assertThat(TimeUtil.fromInfluxDBTimeFormat("2016-10-31T06:52:20.020Z")).isEqualTo(1477896740020L); + assertThat(TimeUtil.fromInfluxDBTimeFormat("2016-10-31T16:52:20.005Z")).isEqualTo(1477932740005L); + assertThat(TimeUtil.fromInfluxDBTimeFormat("2016-10-31T16:52:20Z")).isEqualTo(1477932740000L); + assertThat(TimeUtil.fromInfluxDBTimeFormat("2016-10-31T06:52:20Z")).isEqualTo(1477896740000L); } } From bde2b4d7a575bc170373621d4b71db2ec8834b60 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Mon, 20 Nov 2017 20:57:45 +0100 Subject: [PATCH 079/745] Run all test with JUnitPlatform --- src/test/java/org/influxdb/dto/BatchPointTest.java | 4 +++- src/test/java/org/influxdb/dto/PointTest.java | 3 +++ src/test/java/org/influxdb/dto/QueryTest.java | 3 +++ src/test/java/org/influxdb/impl/BatchProcessorTest.java | 3 +++ src/test/java/org/influxdb/impl/ChunkingExceptionTest.java | 3 +++ src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java | 3 +++ src/test/java/org/influxdb/impl/PreconditionsTest.java | 3 +++ src/test/java/org/influxdb/impl/TimeUtilTest.java | 3 +++ 8 files changed, 24 insertions(+), 1 deletion(-) diff --git a/src/test/java/org/influxdb/dto/BatchPointTest.java b/src/test/java/org/influxdb/dto/BatchPointTest.java index a86029c34..0cac6a64b 100644 --- a/src/test/java/org/influxdb/dto/BatchPointTest.java +++ b/src/test/java/org/influxdb/dto/BatchPointTest.java @@ -10,8 +10,10 @@ import org.influxdb.InfluxDB; import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; - +@RunWith(JUnitPlatform.class) public class BatchPointTest { @Test diff --git a/src/test/java/org/influxdb/dto/PointTest.java 
b/src/test/java/org/influxdb/dto/PointTest.java index 704a2baaf..e828fd9cc 100644 --- a/src/test/java/org/influxdb/dto/PointTest.java +++ b/src/test/java/org/influxdb/dto/PointTest.java @@ -13,6 +13,8 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Assertions; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; /** * Test for the Point DTO. @@ -20,6 +22,7 @@ * @author stefan.majer [at] gmail.com * */ +@RunWith(JUnitPlatform.class) public class PointTest { /** diff --git a/src/test/java/org/influxdb/dto/QueryTest.java b/src/test/java/org/influxdb/dto/QueryTest.java index 40987e1a5..d5d67f35d 100644 --- a/src/test/java/org/influxdb/dto/QueryTest.java +++ b/src/test/java/org/influxdb/dto/QueryTest.java @@ -3,6 +3,8 @@ import static org.assertj.core.api.Assertions.assertThat; import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; @@ -15,6 +17,7 @@ * @author jord [at] moz.com * */ +@RunWith(JUnitPlatform.class) public class QueryTest { /** diff --git a/src/test/java/org/influxdb/impl/BatchProcessorTest.java b/src/test/java/org/influxdb/impl/BatchProcessorTest.java index 216c1d7a1..c30c3b388 100644 --- a/src/test/java/org/influxdb/impl/BatchProcessorTest.java +++ b/src/test/java/org/influxdb/impl/BatchProcessorTest.java @@ -18,7 +18,10 @@ import org.influxdb.dto.Point; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; +@RunWith(JUnitPlatform.class) public class BatchProcessorTest { @Test diff --git a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java index f2865e0cd..c81189b92 100644 --- a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java +++ b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java @@ 
-20,6 +20,8 @@ import org.influxdb.dto.QueryResult; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import com.squareup.moshi.JsonAdapter; @@ -32,6 +34,7 @@ import retrofit2.Callback; import retrofit2.Response; +@RunWith(JUnitPlatform.class) public class ChunkingExceptionTest { @Test diff --git a/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java b/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java index c11b2b15a..688ab9387 100644 --- a/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java +++ b/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java @@ -37,10 +37,13 @@ import org.influxdb.impl.InfluxDBResultMapper; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; /** * @author fmachado */ +@RunWith(JUnitPlatform.class) public class InfluxDBResultMapperTest { InfluxDBResultMapper mapper = new InfluxDBResultMapper(); diff --git a/src/test/java/org/influxdb/impl/PreconditionsTest.java b/src/test/java/org/influxdb/impl/PreconditionsTest.java index 43a65f618..c0ba82add 100644 --- a/src/test/java/org/influxdb/impl/PreconditionsTest.java +++ b/src/test/java/org/influxdb/impl/PreconditionsTest.java @@ -2,7 +2,10 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Assertions; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; +@RunWith(JUnitPlatform.class) public class PreconditionsTest { @Test diff --git a/src/test/java/org/influxdb/impl/TimeUtilTest.java b/src/test/java/org/influxdb/impl/TimeUtilTest.java index a41577c9f..82efe041d 100644 --- a/src/test/java/org/influxdb/impl/TimeUtilTest.java +++ b/src/test/java/org/influxdb/impl/TimeUtilTest.java @@ -3,7 +3,10 @@ import static org.assertj.core.api.Assertions.assertThat; import 
org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; +@RunWith(JUnitPlatform.class) public class TimeUtilTest { @Test public void testToInfluxDBTimeFormatTest() throws Exception { From 0710fa3b925f09c0d5eacfb57f1ca595f39ed6d5 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Tue, 21 Nov 2017 08:04:05 +0100 Subject: [PATCH 080/745] Add Changelog entry, fix markdownlint issues in CHANGELOG.md --- CHANGELOG.md | 139 ++++++++++++++++++++++++++------------------------- 1 file changed, 72 insertions(+), 67 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 27404396c..31dfaa616 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,115 +1,120 @@ +# Changelog + ## 2.8 [unreleased] -#### Fixes +### Fixes - - InfluxDBResultMapper now is able to process QueryResult created when a GROUP BY clause was used [PR #345](https://github.com/influxdata/influxdb-java/pull/345) - - InfluxDB will now handle the timestamp on its own if none is provided [PR#350](https://github.com/influxdata/influxdb-java/pull/350) +- InfluxDBResultMapper now is able to process QueryResult created when a GROUP BY clause was used [PR #345](https://github.com/influxdata/influxdb-java/pull/345) +- InfluxDB will now handle the timestamp on its own if none is provided [PR#350](https://github.com/influxdata/influxdb-java/pull/350) -#### Features +### Features - - API: add InfluxDB#createRetentionPolicy and InfluxDB#dropRetentionPolicy to be able to create and drop Retention Policies [PR #351](https://github.com/influxdata/influxdb-java/pull/351) - - API: add InfluxDB#query that uses callbacks +- API: add InfluxDB#createRetentionPolicy and InfluxDB#dropRetentionPolicy to be able to create and drop Retention Policies [PR #351](https://github.com/influxdata/influxdb-java/pull/351) +- API: add InfluxDB#query that uses callbacks -#### Improvements +### Improvements - - Build: all unit and integration test are now running with jdk8 and jdk9. 
+- Build: all unit and integration test are now running with jdk8 and jdk9. +- Test: migration to junit5 ## v2.7 [2017-06-26] -#### Features +### Features - - Simplify write() methods for use cases writing all points to same database and retention policy [PR #327](https://github.com/influxdata/influxdb-java/pull/327) - - QueryResult to Object mapper added [PR #341](https://github.com/influxdata/influxdb-java/pull/341) +- Simplify write() methods for use cases writing all points to same database and retention policy [PR #327](https://github.com/influxdata/influxdb-java/pull/327) +- QueryResult to Object mapper added [PR #341](https://github.com/influxdata/influxdb-java/pull/341) -#### Fixes +### Fixes - - Replace RuntimeException with InfluxDBException [Issue #323](https://github.com/influxdata/influxdb-java/issues/323) +- Replace RuntimeException with InfluxDBException [Issue #323](https://github.com/influxdata/influxdb-java/issues/323) -#### Improvements +### Improvements - - Significant (~35%) performance improvements for write speed with less memory footprint. [PR #330](https://github.com/influxdata/influxdb-java/pull/330) - - Drop guava runtime dependency which reduces jar size from 1MB -> 49KB [PR #322](https://github.com/influxdata/influxdb-java/pull/322) +- Significant (~35%) performance improvements for write speed with less memory footprint. [PR #330](https://github.com/influxdata/influxdb-java/pull/330) +- Drop guava runtime dependency which reduces jar size from 1MB -> 49KB [PR #322](https://github.com/influxdata/influxdb-java/pull/322) ## v2.6 [2017-06-08] -#### Features +### Features - - Switch to Java 1.8 - - Support chunking - - Add a databaseExists method to InfluxDB interface - - [Issue #289](https://github.com/influxdata/influxdb-java/issues/289) Batching enhancements: Pending asynchronous writes can be explicitly flushed via `InfluxDB.flush()`. 
- - Add a listener to notify asynchronous errors during batch flushes [PR #318](https://github.com/influxdata/influxdb-java/pull/318). +- Switch to Java 1.8 +- Support chunking +- Add a databaseExists method to InfluxDB interface +- [Issue #289](https://github.com/influxdata/influxdb-java/issues/289) Batching enhancements: Pending asynchronous writes can be explicitly flushed via `InfluxDB.flush()`. +- Add a listener to notify asynchronous errors during batch flushes [PR #318](https://github.com/influxdata/influxdb-java/pull/318). -#### Fixes +### Fixes - - [Issue #263](https://github.com/influxdata/influxdb-java/issues/263) Add databaseExists method to InfluxDB interface. +- [Issue #263](https://github.com/influxdata/influxdb-java/issues/263) Add databaseExists method to InfluxDB interface. -#### Improvements +### Improvements - - Update retrofit from 2.1 to 2.2 - - Update slf4j from 1.7.22 to 1.7.24 - - Update okhttp3 from 3.5 to 3.6 - - automatically adjust batch processor capacity [PR #282](https://github.com/influxdata/influxdb-java/pull/282) +- Update retrofit from 2.1 to 2.2 +- Update slf4j from 1.7.22 to 1.7.24 +- Update okhttp3 from 3.5 to 3.6 +- automatically adjust batch processor capacity [PR #282](https://github.com/influxdata/influxdb-java/pull/282) ## v2.5 [2016-12-05] -#### Features +### Features - - Support writing by UDP protocal. - - Support gzip compress for http request body. - - Support setting thread factory for batch processor. - - Support chunking +- Support writing by UDP protocal. +- Support gzip compress for http request body. +- Support setting thread factory for batch processor. +- Support chunking -#### Fixes +### Fixes - - [Issue #162](https://github.com/influxdb/influxdb-java/issues/162) Write point using async batch mode with different rp will use same rp. - - [Issue #214](https://github.com/influxdb/influxdb-java/issues/214) Send multiple queries in one query statement will get only one result. 
- - Write can't be always async if batch is enabled. +- [Issue #162](https://github.com/influxdb/influxdb-java/issues/162) Write point using async batch mode with different rp will use same rp. +- [Issue #214](https://github.com/influxdb/influxdb-java/issues/214) Send multiple queries in one query statement will get only one result. +- Write can't be always async if batch is enabled. -#### Improvements +### Improvements - - Remove the limit for database name: not contain '-'. - - Support creating influxdb instance without username and password. - - Add time related util methods for converting influxdb timestamp or unix epoch time. - - correct exception type when disable batch twice. +- Remove the limit for database name: not contain '-'. +- Support creating influxdb instance without username and password. +- Add time related util methods for converting influxdb timestamp or unix epoch time. +- correct exception type when disable batch twice. ## v2.4 [2016-10-24] -#### Features - - now uses okhttp3 and retrofit2. As a result, you can now pass an OkHttpClient.Builder to the InfluxDBFactory.connect if you wish to add more interceptors, etc, to OkHttp. - - in InfluxDB 1.0.0, some queries now require a POST instead of GET. There is a flag on Query that allow this to be specified (default is still GET). +### Features + +- now uses okhttp3 and retrofit2. As a result, you can now pass an OkHttpClient.Builder to the InfluxDBFactory.connect if you wish to add more interceptors, etc, to OkHttp. +- in InfluxDB 1.0.0, some queries now require a POST instead of GET. There is a flag on Query that allow this to be specified (default is still GET). 
## v2.2 [2016-04-11] -#### Features +### Features + +- Allow writing of pre-constructed line protocol strings - - Allow writing of pre-constructed line protocol strings +### Fixes -#### Fixes +- Correct escaping of database names for create and delete database actions +- Many bug fixes / improvements in general - - Correct escaping of database names for create and delete database actions - - Many bug fixes / improvements in general +### Other -#### Other - - Deprecated `field()` method in preference for `addField()` methods. +- Deprecated `field()` method in preference for `addField()` methods. ## v2.1 [2015-12-05] -#### Features +### Features - - Extensions to fluent builder classes - - Convenience methods for building Points - - Allow integer types as field values +- Extensions to fluent builder classes +- Convenience methods for building Points +- Allow integer types as field values -#### Fixes +### Fixes - - Fixed escaping of tag and field values - - Always uses nanosecond precision for time - - Uses NumberFormat class for safer formatting of large numbers. +- Fixed escaping of tag and field values +- Always uses nanosecond precision for time +- Uses NumberFormat class for safer formatting of large numbers. ## v2.0 [2015-07-17] -#### Features +### Features - Compatible with InfluxDB version 0.9+ - Support for lineprotocol @@ -121,7 +126,7 @@ No major functional changes or improvements. Mainly library updates and code str ## v1.3 [2014-10-22] -#### Features +### Features - Compatible with InfluxDB Version up to 0.8 - API: add a InfluxDB#createDatabase(DatabaseConfiguration) to be able to create a new Database with ShardSpaces defined. @@ -134,12 +139,12 @@ No major functional changes or improvements. 
Mainly library updates and code str ## v1.2 [2014-06-28] -#### Features +### Features - [Issue #2](https://github.com/influxdb/influxdb-java/issues/2) Implement the last missing api calls ( interfaces, sync, forceCompaction, servers, shards) - use (http://square.github.io/okhttp/, okhttp) instead of java builtin httpconnection to get failover for the http endpoint. -#### Tasks +### Tasks - [Issue #8](https://github.com/influxdb/influxdb-java/issues/8) Use com.github.docker-java which replaces com.kpelykh for Integration tests. - [Issue #6](https://github.com/influxdb/influxdb-java/issues/6) Update Retrofit to 1.6.0 @@ -148,7 +153,7 @@ No major functional changes or improvements. Mainly library updates and code str ## v1.1 [2014-05-31] -#### Features +### Features - Add InfluxDB#version() to get the InfluxDB Server version information. - Changed InfluxDB#createDatabase() to match [Issue #489](https://github.com/influxdb/influxdb/issues/489) without replicationFactor. @@ -156,4 +161,4 @@ No major functional changes or improvements. 
Mainly library updates and code str ## v1.0 [2014-05-6] - * Initial Release +- Initial Release From 0df2b2cf5d9d3e103dba0e93e7cc1858d5d37505 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Tue, 21 Nov 2017 10:53:30 +0100 Subject: [PATCH 081/745] assertEquals first argument is expected, second actual --- src/test/java/org/influxdb/InfluxDBTest.java | 42 ++++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index cd505dd63..0953f6390 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -306,9 +306,9 @@ public void testWriteMultipleStringDataThroughUDP() { QueryResult result = this.influxDB.query(query); Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); } /** @@ -327,9 +327,9 @@ public void testWriteMultipleStringDataLinesThroughUDP() { QueryResult result = this.influxDB.query(query); Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - 
Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); } /** @@ -373,9 +373,9 @@ public void testWriteMultipleStringData() { QueryResult result = this.influxDB.query(query); Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); this.influxDB.deleteDatabase(dbName); } @@ -395,9 +395,9 @@ public void testWriteMultipleStringDataSimple() { QueryResult result = this.influxDB.query(query); Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", 
result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); this.influxDB.deleteDatabase(dbName); } @@ -419,9 +419,9 @@ public void testWriteMultipleStringDataLines() { QueryResult result = this.influxDB.query(query); Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); this.influxDB.deleteDatabase(dbName); } @@ -445,9 +445,9 @@ public void testWriteMultipleStringDataLinesSimple() { QueryResult result = this.influxDB.query(query); Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); this.influxDB.deleteDatabase(dbName); } @@ -596,9 +596,9 @@ public void 
testWriteEnableGzip() { QueryResult result = influxDBForTestGzip.query(query); Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(0).getTags().get("atag"), "test1"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(1).getTags().get("atag"), "test2"); - Assertions.assertEquals(result.getResults().get(0).getSeries().get(2).getTags().get("atag"), "test3"); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); } finally { influxDBForTestGzip.deleteDatabase(dbName); influxDBForTestGzip.close(); From 6f7181ae2844ce72ab46c321fa9170450e638c6b Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Wed, 22 Nov 2017 09:37:28 +0100 Subject: [PATCH 082/745] Fix single test which was failing on travis because it relied on order of execution previously --- src/test/java/org/influxdb/PerformanceTests.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/test/java/org/influxdb/PerformanceTests.java b/src/test/java/org/influxdb/PerformanceTests.java index 98827e397..47a80eb58 100644 --- a/src/test/java/org/influxdb/PerformanceTests.java +++ b/src/test/java/org/influxdb/PerformanceTests.java @@ -116,6 +116,10 @@ public void testWriteCompareUDPPerformanceForBatchWithSinglePoints() { lineProtocols.add(point.lineProtocol()); } + String dbName = "write_compare_udp_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + this.influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS); + //write batch of 1000 single string. 
Stopwatch watch = Stopwatch.createStarted(); this.influxDB.write(UDP_PORT, lineProtocols); @@ -127,6 +131,7 @@ public void testWriteCompareUDPPerformanceForBatchWithSinglePoints() { for (String lineProtocol: lineProtocols){ this.influxDB.write(UDP_PORT, lineProtocol); } + this.influxDB.deleteDatabase(dbName); long elapsedForSingleWrite = watch.elapsed(TimeUnit.MILLISECONDS); System.out.println("performance(ms):write udp with 1000 single strings:" + elapsedForSingleWrite); From df4af4599d568d9b1a7acf731254acdb8214f558 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 23 Nov 2017 10:21:37 +0100 Subject: [PATCH 083/745] Get rid of guava even for tests --- pom.xml | 6 ----- src/test/java/org/influxdb/InfluxDBTest.java | 26 +++++++++---------- .../java/org/influxdb/PerformanceTests.java | 21 +++++++-------- 3 files changed, 22 insertions(+), 31 deletions(-) diff --git a/pom.xml b/pom.xml index 979f63a01..84e622574 100644 --- a/pom.xml +++ b/pom.xml @@ -239,12 +239,6 @@ 2.12.0 test - - com.google.guava - guava - 23.4-jre - test - com.squareup.retrofit2 retrofit diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 0953f6390..e206ffaa0 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -26,8 +26,6 @@ import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; -import com.google.common.util.concurrent.Uninterruptibles; - /** * Test the InfluxDB API. * @@ -195,12 +193,12 @@ public void testWrite() { * Test the implementation of {@link InfluxDB#write(int, Point)}'s sync support. 
*/ @Test - public void testSyncWritePointThroughUDP() { + public void testSyncWritePointThroughUDP() throws InterruptedException { this.influxDB.disableBatch(); String measurement = TestUtils.getRandomMeasurement(); Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); this.influxDB.write(UDP_PORT, point); - Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); + Thread.sleep(2000); Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); @@ -210,14 +208,14 @@ public void testSyncWritePointThroughUDP() { * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. */ @Test - public void testAsyncWritePointThroughUDP() { + public void testAsyncWritePointThroughUDP() throws InterruptedException { this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); try{ Assertions.assertTrue(this.influxDB.isBatchEnabled()); String measurement = TestUtils.getRandomMeasurement(); Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); this.influxDB.write(UDP_PORT, point); - Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); + Thread.sleep(2000); Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); @@ -282,11 +280,11 @@ public void testWriteStringDataSimple() { * Test writing to the database using string protocol through UDP. 
*/ @Test - public void testWriteStringDataThroughUDP() { + public void testWriteStringDataThroughUDP() throws InterruptedException { String measurement = TestUtils.getRandomMeasurement(); this.influxDB.write(UDP_PORT, measurement + ",atag=test idle=90,usertime=9,system=1"); //write with UDP may be executed on server after query with HTTP. so sleep 2s to handle this case - Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); + Thread.sleep(2000); Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); @@ -296,12 +294,12 @@ public void testWriteStringDataThroughUDP() { * Test writing multiple records to the database using string protocol through UDP. */ @Test - public void testWriteMultipleStringDataThroughUDP() { + public void testWriteMultipleStringDataThroughUDP() throws InterruptedException { String measurement = TestUtils.getRandomMeasurement(); this.influxDB.write(UDP_PORT, measurement + ",atag=test1 idle=100,usertime=10,system=1\n" + measurement + ",atag=test2 idle=200,usertime=20,system=2\n" + measurement + ",atag=test3 idle=300,usertime=30,system=3"); - Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); + Thread.sleep(2000); Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); @@ -315,14 +313,14 @@ public void testWriteMultipleStringDataThroughUDP() { * Test writing multiple separate records to the database using string protocol through UDP. 
*/ @Test - public void testWriteMultipleStringDataLinesThroughUDP() { + public void testWriteMultipleStringDataLinesThroughUDP() throws InterruptedException { String measurement = TestUtils.getRandomMeasurement(); this.influxDB.write(UDP_PORT, Arrays.asList( measurement + ",atag=test1 idle=100,usertime=10,system=1", measurement + ",atag=test2 idle=200,usertime=20,system=2", measurement + ",atag=test3 idle=300,usertime=30,system=3" )); - Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); + Thread.sleep(2000); Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); @@ -646,7 +644,7 @@ public void testChunking() throws InterruptedException { batchPoints.point(point3); this.influxDB.write(batchPoints); - Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); + Thread.sleep(2000); final BlockingQueue queue = new LinkedBlockingQueue<>(); Query query = new Query("SELECT * FROM disk", dbName); this.influxDB.query(query, 2, new Consumer() { @@ -655,7 +653,7 @@ public void accept(QueryResult result) { queue.add(result); }}); - Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS); + Thread.sleep(2000); this.influxDB.deleteDatabase(dbName); QueryResult result = queue.poll(20, TimeUnit.SECONDS); diff --git a/src/test/java/org/influxdb/PerformanceTests.java b/src/test/java/org/influxdb/PerformanceTests.java index 47a80eb58..e7c363d0e 100644 --- a/src/test/java/org/influxdb/PerformanceTests.java +++ b/src/test/java/org/influxdb/PerformanceTests.java @@ -1,6 +1,5 @@ package org.influxdb; -import com.google.common.base.Stopwatch; import org.influxdb.InfluxDB.LogLevel; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; @@ -47,7 +46,7 @@ public void testWriteSinglePointPerformance() { this.influxDB.createDatabase(dbName); this.influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS); String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); - Stopwatch watch 
= Stopwatch.createStarted(); + long start = System.currentTimeMillis(); for (int j = 0; j < SINGLE_POINT_COUNT; j++) { Point point = Point.measurement("cpu") .addField("idle", (double) j) @@ -56,7 +55,7 @@ public void testWriteSinglePointPerformance() { this.influxDB.write(dbName, rp, point); } this.influxDB.disableBatch(); - System.out.println("Single Point Write for " + SINGLE_POINT_COUNT + " writes of Points took:" + watch); + System.out.println("Single Point Write for " + SINGLE_POINT_COUNT + " writes of Points took:" + (System.currentTimeMillis() - start)); this.influxDB.deleteDatabase(dbName); } @@ -67,7 +66,7 @@ public void testWritePerformance() { this.influxDB.createDatabase(dbName); String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); - Stopwatch watch = Stopwatch.createStarted(); + long start = System.currentTimeMillis(); for (int i = 0; i < COUNT; i++) { BatchPoints batchPoints = BatchPoints @@ -87,7 +86,7 @@ public void testWritePerformance() { this.influxDB.write(batchPoints); } - System.out.println("WritePoints for " + COUNT + " writes of " + POINT_COUNT + " Points took:" + watch); + System.out.println("WritePoints for " + COUNT + " writes of " + POINT_COUNT + " Points took:" + (System.currentTimeMillis() - start)); this.influxDB.deleteDatabase(dbName); } @@ -98,12 +97,12 @@ public void testMaxWritePointsPerformance() { this.influxDB.enableBatch(100000, 60, TimeUnit.SECONDS); String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); - Stopwatch watch = Stopwatch.createStarted(); + long start = System.currentTimeMillis(); for (int i = 0; i < 2000000; i++) { Point point = Point.measurement("s").addField("v", 1.0).build(); this.influxDB.write(dbName, rp, point); } - System.out.println("5Mio points:" + watch); + System.out.println("5Mio points:" + (System.currentTimeMillis() - start)); this.influxDB.deleteDatabase(dbName); } @@ -121,19 +120,19 @@ public void testWriteCompareUDPPerformanceForBatchWithSinglePoints() { 
this.influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS); //write batch of 1000 single string. - Stopwatch watch = Stopwatch.createStarted(); + long start = System.currentTimeMillis(); this.influxDB.write(UDP_PORT, lineProtocols); - long elapsedForBatchWrite = watch.elapsed(TimeUnit.MILLISECONDS); + long elapsedForBatchWrite = System.currentTimeMillis() - start; System.out.println("performance(ms):write udp with batch of 1000 string:" + elapsedForBatchWrite); //write 1000 single string by udp. - watch = Stopwatch.createStarted(); + start = System.currentTimeMillis(); for (String lineProtocol: lineProtocols){ this.influxDB.write(UDP_PORT, lineProtocol); } this.influxDB.deleteDatabase(dbName); - long elapsedForSingleWrite = watch.elapsed(TimeUnit.MILLISECONDS); + long elapsedForSingleWrite = System.currentTimeMillis() - start; System.out.println("performance(ms):write udp with 1000 single strings:" + elapsedForSingleWrite); Assertions.assertTrue(elapsedForSingleWrite - elapsedForBatchWrite > 0); From ca76250e7a3b6b3ebd17f0eb658af9010253affc Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Wed, 6 Dec 2017 00:08:15 +0100 Subject: [PATCH 084/745] release 2.8 --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 84e622574..a48133680 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.8-SNAPSHOT + 2.8 influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -153,7 +153,7 @@ - org.apache.maven.plugins maven-gpg-plugin @@ -168,7 +168,7 @@ ---> + org.jacoco jacoco-maven-plugin From 563c29d0c81683406b565f89e951e53de35540c8 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Wed, 6 Dec 2017 00:09:20 +0100 Subject: [PATCH 085/745] ready for 2.9-SNAPSHOT --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index a48133680..bd92d18d7 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java 
jar - 2.8 + 2.9-SNAPSHOT influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -153,7 +153,7 @@ - + +--> org.jacoco jacoco-maven-plugin From 70430d65ff6eed07178b4b2ca9226fdc5644d3b9 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 7 Dec 2017 08:59:34 +0100 Subject: [PATCH 086/745] Mention released version --- CHANGELOG.md | 5 ++++- README.md | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 31dfaa616..3ef151b3c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,9 @@ # Changelog -## 2.8 [unreleased] +## 2.9 [unreleased] + + +## 2.8 [2017-12-06] ### Fixes diff --git a/README.md b/README.md index 329c66ea4..9f88ddbbb 100644 --- a/README.md +++ b/README.md @@ -250,7 +250,7 @@ The latest version for maven dependence: org.influxdb influxdb-java - 2.7 + 2.8 ``` Or when using with gradle: From 9f379e2972dfd58e4aef07a3a8ca9e6c08bf0fcf Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 7 Dec 2017 09:01:23 +0100 Subject: [PATCH 087/745] Mention released version --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9f88ddbbb..df5069b83 100644 --- a/README.md +++ b/README.md @@ -255,7 +255,7 @@ The latest version for maven dependence: ``` Or when using with gradle: ```groovy -compile 'org.influxdb:influxdb-java:2.7' +compile 'org.influxdb:influxdb-java:2.8' ``` For version change history have a look at [ChangeLog](https://github.com/influxdata/influxdb-java/blob/master/CHANGELOG.md). From 109bac43a933f019e3f9c8336e38b259c4c6c278 Mon Sep 17 00:00:00 2001 From: Artur Keska Date: Sat, 16 Dec 2017 22:08:50 +0100 Subject: [PATCH 088/745] fix #365 - fix problem of connecting to the influx api with URL which does not points to the url root (e.g. 
localhots:80/influx-api/) This change was tested with following URLs: http://localhost:8086 http://localhost:8086/ Proxied connection (influx behind Nginx) http://loocalhost:8080/influx-api/ --- .../java/org/influxdb/impl/InfluxDBService.java | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBService.java b/src/main/java/org/influxdb/impl/InfluxDBService.java index 6485f8654..951c051aa 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBService.java +++ b/src/main/java/org/influxdb/impl/InfluxDBService.java @@ -1,9 +1,8 @@ package org.influxdb.impl; -import org.influxdb.dto.QueryResult; - import okhttp3.RequestBody; import okhttp3.ResponseBody; +import org.influxdb.dto.QueryResult; import retrofit2.Call; import retrofit2.http.Body; import retrofit2.http.GET; @@ -37,34 +36,34 @@ interface InfluxDBService { * @param consistency optional The write consistency level required for the write to succeed. * Can be one of one, any, all, quorum. Defaults to all. 
*/ - @POST("/write") + @POST("write") public Call writePoints(@Query(U) String username, @Query(P) String password, @Query(DB) String database, @Query(RP) String retentionPolicy, @Query(PRECISION) String precision, @Query(CONSISTENCY) String consistency, @Body RequestBody batchPoints); - @GET("/query") + @GET("query") public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, @Query(EPOCH) String epoch, @Query(value = Q, encoded = true) String query); - @GET("/query") + @GET("query") public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, @Query(value = Q, encoded = true) String query); - @POST("/query") + @POST("query") public Call postQuery(@Query(U) String username, @Query(P) String password, @Query(DB) String db, @Query(value = Q, encoded = true) String query); - @GET("/query") + @GET("query") public Call query(@Query(U) String username, @Query(P) String password, @Query(value = Q, encoded = true) String query); - @POST("/query") + @POST("query") public Call postQuery(@Query(U) String username, @Query(P) String password, @Query(value = Q, encoded = true) String query); @Streaming - @GET("/query?chunked=true") + @GET("query?chunked=true") public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, @Query(value = Q, encoded = true) String query, @Query(CHUNK_SIZE) int chunkSize); From 7b7410b293b48c681ecf4904d3245a0a8c00de5a Mon Sep 17 00:00:00 2001 From: Artur Keska Date: Mon, 18 Dec 2017 09:56:42 +0100 Subject: [PATCH 089/745] chore(): version changed to .xnet so we can use it as custom --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index bd92d18d7..61110c5c5 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.9-SNAPSHOT + 2.9.xnet influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org From 75c8664576ed1da5878f7f60a5f1ea0fef73da78 Mon Sep 17 
00:00:00 2001 From: Artur Keska Date: Tue, 19 Dec 2017 21:43:05 +0100 Subject: [PATCH 090/745] refac: replaced IP and port property with one URL, so it's easy to pass different URL's for testing (also remoted unused code with selection of API notAPI port) --- .../org/influxdb/InfluxDBFactoryTest.java | 7 +-- src/test/java/org/influxdb/InfluxDBTest.java | 13 ++-- .../java/org/influxdb/PerformanceTests.java | 11 ++-- src/test/java/org/influxdb/TestUtils.java | 59 +++++++------------ src/test/java/org/influxdb/TicketTest.java | 3 +- .../influxdb/impl/ChunkingExceptionTest.java | 28 ++++----- 6 files changed, 48 insertions(+), 73 deletions(-) diff --git a/src/test/java/org/influxdb/InfluxDBFactoryTest.java b/src/test/java/org/influxdb/InfluxDBFactoryTest.java index 99291b522..0ff1a3127 100644 --- a/src/test/java/org/influxdb/InfluxDBFactoryTest.java +++ b/src/test/java/org/influxdb/InfluxDBFactoryTest.java @@ -1,13 +1,12 @@ package org.influxdb; +import okhttp3.OkHttpClient; import org.influxdb.dto.Pong; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; -import okhttp3.OkHttpClient; - /** * Test the InfluxDB Factory API. 
* @@ -22,7 +21,7 @@ public class InfluxDBFactoryTest { */ @Test public void testCreateInfluxDBInstanceWithoutUserNameAndPassword() { - InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true)); + InfluxDB influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL()); verifyInfluxDBInstance(influxDB); } @@ -38,7 +37,7 @@ private void verifyInfluxDBInstance(InfluxDB influxDB) { */ @Test public void testCreateInfluxDBInstanceWithClientAndWithoutUserNameAndPassword() { - InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), new OkHttpClient.Builder()); + InfluxDB influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), new OkHttpClient.Builder()); verifyInfluxDBInstance(influxDB); } diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index e206ffaa0..0edfc8b8e 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -11,7 +11,6 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; - import org.influxdb.InfluxDB.LogLevel; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; @@ -19,8 +18,8 @@ import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; import org.influxdb.impl.InfluxDBImpl; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.platform.runner.JUnitPlatform; @@ -47,7 +46,7 @@ public class InfluxDBTest { */ @BeforeEach public void setUp() throws InterruptedException, IOException { - this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); + this.influxDB = 
InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); boolean influxDBstarted = false; do { Pong response; @@ -544,7 +543,7 @@ public void testBatchEnabledWithThreadFactoryIsNull() { public void testWrongHostForInfluxdb(){ String errorHost = "10.224.2.122_error_host"; Assertions.assertThrows(RuntimeException.class, () -> { - InfluxDBFactory.connect("http://" + errorHost + ":" + TestUtils.getInfluxPORT(true)); + InfluxDBFactory.connect(TestUtils.getInfluxURL()); }); } @@ -565,7 +564,7 @@ public void testBatchEnabledTwice() { */ @Test public void testCloseInfluxDBClient() { - InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); + InfluxDB influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); influxDB.enableBatch(1, 1, TimeUnit.SECONDS); Assertions.assertTrue(influxDB.isBatchEnabled()); influxDB.close(); @@ -577,7 +576,7 @@ public void testCloseInfluxDBClient() { */ @Test public void testWriteEnableGzip() { - InfluxDB influxDBForTestGzip = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); + InfluxDB influxDBForTestGzip = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); String dbName = "write_unittest_" + System.currentTimeMillis(); try { influxDBForTestGzip.setLogLevel(LogLevel.NONE); @@ -609,7 +608,7 @@ public void testWriteEnableGzip() { */ @Test public void testWriteEnableGzipAndDisableGzip() { - InfluxDB influxDBForTestGzip = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); + InfluxDB influxDBForTestGzip = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); try { //test default: gzip is disable Assertions.assertFalse(influxDBForTestGzip.isGzipEnabled()); diff --git a/src/test/java/org/influxdb/PerformanceTests.java b/src/test/java/org/influxdb/PerformanceTests.java 
index e7c363d0e..2da47ebca 100644 --- a/src/test/java/org/influxdb/PerformanceTests.java +++ b/src/test/java/org/influxdb/PerformanceTests.java @@ -1,20 +1,19 @@ package org.influxdb; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; import org.influxdb.InfluxDB.LogLevel; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - @RunWith(JUnitPlatform.class) public class PerformanceTests { private InfluxDB influxDB; @@ -27,7 +26,7 @@ public class PerformanceTests { @BeforeEach public void setUp() { - this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "root", "root"); + this.influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "root", "root"); this.influxDB.setLogLevel(LogLevel.NONE); this.influxDB.createDatabase(UDP_DATABASE); } diff --git a/src/test/java/org/influxdb/TestUtils.java b/src/test/java/org/influxdb/TestUtils.java index 7ad8dff24..548ae1bc7 100644 --- a/src/test/java/org/influxdb/TestUtils.java +++ b/src/test/java/org/influxdb/TestUtils.java @@ -4,44 +4,27 @@ public class TestUtils { - public static String getInfluxIP() { - String ip = "127.0.0.1"; - - Map getenv = System.getenv(); - if (getenv.containsKey("INFLUXDB_IP")) { - ip = getenv.get("INFLUXDB_IP"); - } - - return ip; - } - - public static String getRandomMeasurement() { - return "measurement_" + System.nanoTime(); - } - - public static String getInfluxPORT(boolean apiPort) { - String port = "8086"; - - Map getenv = System.getenv(); - if(apiPort) { - if 
(getenv.containsKey("INFLUXDB_PORT_API")) - port = getenv.get("INFLUXDB_PORT_API"); - } - else { - port = "8096"; - if (getenv.containsKey("INFLUXDB_PORT_COLLECTD")) - port = getenv.get("INFLUXDB_PORT_COLLECTD"); - } - - return port; - } + public static String getInfluxURL() { + String ip = "http://127.0.0.1:8086"; - public static String defaultRetentionPolicy(String version) { - if (version.startsWith("0.") ) { - return "default"; - } else { - return "autogen"; - } - } + Map getenv = System.getenv(); + if (getenv.containsKey("INFLUXDB_API_URL")) { + ip = getenv.get("INFLUXDB_API_URL"); + } + return ip; + } + + + public static String getRandomMeasurement() { + return "measurement_" + System.nanoTime(); + } + + public static String defaultRetentionPolicy(String version) { + if (version.startsWith("0.")) { + return "default"; + } else { + return "autogen"; + } + } } diff --git a/src/test/java/org/influxdb/TicketTest.java b/src/test/java/org/influxdb/TicketTest.java index 828b30a8e..a481ccd4f 100644 --- a/src/test/java/org/influxdb/TicketTest.java +++ b/src/test/java/org/influxdb/TicketTest.java @@ -4,7 +4,6 @@ import java.io.IOException; import java.util.Date; import java.util.concurrent.TimeUnit; - import org.influxdb.InfluxDB.LogLevel; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; @@ -34,7 +33,7 @@ public class TicketTest { */ @BeforeEach public void setUp() throws InterruptedException, IOException { - this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); + this.influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); boolean influxDBstarted = false; do { Pong response; diff --git a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java index c81189b92..b7e61e519 100644 --- a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java +++ 
b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java @@ -1,19 +1,16 @@ package org.influxdb.impl; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - +import com.squareup.moshi.JsonAdapter; +import com.squareup.moshi.JsonReader; import java.io.EOFException; import java.io.IOException; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; - +import okhttp3.OkHttpClient; +import okhttp3.ResponseBody; +import okio.Buffer; import org.influxdb.InfluxDB; import org.influxdb.TestUtils; import org.influxdb.dto.Query; @@ -23,13 +20,12 @@ import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; - -import com.squareup.moshi.JsonAdapter; -import com.squareup.moshi.JsonReader; - -import okhttp3.OkHttpClient; -import okhttp3.ResponseBody; -import okio.Buffer; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; @@ -60,7 +56,7 @@ public void testChunkingException(Exception ex, String message) throws IOExcepti when(responseBody.source()).thenReturn(new Buffer()); doThrow(ex).when(adapter).fromJson(any(JsonReader.class)); - String url = "http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true); + String url = TestUtils.getInfluxURL(); InfluxDB influxDB = new InfluxDBImpl(url, "admin", "admin", new OkHttpClient.Builder(), influxDBService, adapter) { @Override public String version() { 
From 055086243043023c85d804b827783e148564bc38 Mon Sep 17 00:00:00 2001 From: Artur Keska Date: Tue, 19 Dec 2017 22:25:43 +0100 Subject: [PATCH 091/745] refac: test correction: the port is actually needed in one test. Anyway, I believe that it's better to keep the URL in single URL property --- compile-and-test.sh | 72 +- src/test/java/org/influxdb/InfluxDBTest.java | 844 ++++++++++--------- 2 files changed, 494 insertions(+), 422 deletions(-) diff --git a/compile-and-test.sh b/compile-and-test.sh index 67323c40f..88650019e 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -2,11 +2,48 @@ # # script to start influxdb and compile influxdb-java with all tests. # +# Note for Windows users: +# In case your docker still uses VirtualBox as a VM, you will probably have to +# inform the containers about the location of your repository. +# Please not that Docker for windows, enables you to mount everything +# from your Users (C:\Users in most cases) directory, so to keep it easy, +# it's better to keep your repository somewhere there. +# If you will decide to put your sources somewhere else, please visit your +# VirtualBox settings and check out the "Shared folder configuration". +# This script uses environment variable BUILD_HOME which should point to this +# project directory (i.e. //c/Users/MyWindowsUserName/Projects/influxdb-java) +# +# Of course you still need bash to launch this script. But this should be no +# problem either to install it (this script was tested with GitExtensions package). 
+ set -e -INFLUXDB_VERSIONS="1.4 1.3 1.2 1.1" +#USE_PROXY=nginx +#INFLUXDB_VERSIONS="1.4 1.3 1.2 1.1" +#JAVA_VERSIONS="3-jdk-8-alpine 3-jdk-9-slim" + +INFLUXDB_VERSIONS="1.4" +JAVA_VERSIONS="3-jdk-8-alpine" + +WORKDIR=/usr/src/mymaven + +if [ -z "$BUILD_HOME" ] ; then + BUILD_HOME=$PWD +fi -JAVA_VERSIONS="3-jdk-8-alpine 3-jdk-9-slim" +INFLUXDB_API_URL=http://influxdb:8086 + +if [ "$USE_PROXY" == "nginx" ] ; then + echo Test with Nginx as proxy + INFLUXDB_API_URL=http://nginx:80/influx-api +fi + +if [ -x /c/Windows/System32/ ] ; then + echo "Detected Windows as a host system" + WORKDIR=//usr/src/mymaven +fi + +echo Using build home: $BUILD_HOME for java_version in ${JAVA_VERSIONS} @@ -23,17 +60,32 @@ do --name influxdb \ --publish 8086:8086 \ --publish 8089:8089/udp \ - --volume ${PWD}/influxdb.conf:/etc/influxdb/influxdb.conf \ + --volume ${BUILD_HOME}/influxdb.conf:/etc/influxdb/influxdb.conf \ influxdb:${version}-alpine - docker run -it --rm \ - --volume $PWD:/usr/src/mymaven \ - --volume $PWD/.m2:/root/.m2 \ - --workdir /usr/src/mymaven \ - --link=influxdb \ - --env INFLUXDB_IP=influxdb \ + if [ "$USE_PROXY" == "nginx" ] ; then + echo Starting Nginx + docker rm -f nginx || true + docker run \ + --detach \ + --name nginx \ + --publish 8888:80 \ + --volume ${BUILD_HOME}/src/test/nginx/nginx.conf:/etc/nginx/nginx.conf:ro \ + --link influxdb:influxdb \ + nginx nginx-debug '-g' 'daemon off;' + NGINX_LINK=--link=nginx + fi + + docker run -it --rm \ + --volume $BUILD_HOME:/usr/src/mymaven \ + --volume $BUILD_HOME/.m2:/root/.m2 \ + --workdir $WORKDIR \ + --link=influxdb $NGINX_LINK \ + --env INFLUXDB_API_URL=$INFLUXDB_API_URL \ maven:${java_version} mvn clean install - docker kill influxdb || true + docker kill influxdb || true + docker kill nginx || true + docker rm -f nginx || true done done diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 0edfc8b8e..693494ea3 100644 --- 
a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -1,6 +1,7 @@ package org.influxdb; import java.io.IOException; +import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -34,211 +35,213 @@ @RunWith(JUnitPlatform.class) public class InfluxDBTest { - private InfluxDB influxDB; - private final static int UDP_PORT = 8089; - private final static String UDP_DATABASE = "udp"; - - /** - * Create a influxDB connection before all tests start. - * - * @throws InterruptedException - * @throws IOException - */ - @BeforeEach - public void setUp() throws InterruptedException, IOException { - this.influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); - boolean influxDBstarted = false; - do { - Pong response; - try { - response = this.influxDB.ping(); - if (!response.getVersion().equalsIgnoreCase("unknown")) { - influxDBstarted = true; - } - } catch (Exception e) { - // NOOP intentional - e.printStackTrace(); - } - Thread.sleep(100L); - } while (!influxDBstarted); - this.influxDB.setLogLevel(LogLevel.NONE); - this.influxDB.createDatabase(UDP_DATABASE); + private InfluxDB influxDB; + private final static int UDP_PORT = 8089; + private final static String UDP_DATABASE = "udp"; + + /** + * Create a influxDB connection before all tests start. 
+ * + * @throws InterruptedException + * @throws IOException + */ + @BeforeEach + public void setUp() throws InterruptedException, IOException { + this.influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); + boolean influxDBstarted = false; + do { + Pong response; + try { + response = this.influxDB.ping(); + if (!response.getVersion().equalsIgnoreCase("unknown")) { + influxDBstarted = true; + } + } catch (Exception e) { + // NOOP intentional + e.printStackTrace(); + } + Thread.sleep(100L); + } while (!influxDBstarted); + this.influxDB.setLogLevel(LogLevel.NONE); + this.influxDB.createDatabase(UDP_DATABASE); System.out.println("################################################################################## "); - System.out.println("# Connected to InfluxDB Version: " + this.influxDB.version() + " #"); - System.out.println("##################################################################################"); - } - - /** - * delete UDP database after all tests end. - */ - @AfterEach - public void cleanup(){ - this.influxDB.deleteDatabase(UDP_DATABASE); - } - - /** - * Test for a ping. - */ - @Test - public void testPing() { - Pong result = this.influxDB.ping(); - Assertions.assertNotNull(result); - Assertions.assertNotEquals(result.getVersion(), "unknown"); - } - - /** - * Test that version works. - */ - @Test - public void testVersion() { - String version = this.influxDB.version(); - Assertions.assertNotNull(version); - Assertions.assertFalse(version.contains("unknown")); - } - - /** - * Simple Test for a query. - */ - @Test - public void testQuery() { - this.influxDB.query(new Query("CREATE DATABASE mydb2", "mydb")); - this.influxDB.query(new Query("DROP DATABASE mydb2", "mydb")); - } - - /** - * Tests for callback query. 
- */ - @Test - public void testCallbackQuery() throws Throwable { - final AsyncResult result = new AsyncResult<>(); - final Consumer firstQueryConsumer = new Consumer() { - @Override - public void accept(QueryResult queryResult) { - influxDB.query(new Query("DROP DATABASE mydb2", "mydb"), result.resultConsumer, result.errorConsumer); - } - }; - - this.influxDB.query(new Query("CREATE DATABASE mydb2", "mydb"), firstQueryConsumer, result.errorConsumer); - - // Will throw exception in case of error. - result.result(); - } - - /** - * Test that describe Databases works. - */ - @Test - public void testDescribeDatabases() { - String dbName = "unittest_" + System.currentTimeMillis(); - this.influxDB.createDatabase(dbName); - this.influxDB.describeDatabases(); - List result = this.influxDB.describeDatabases(); - Assertions.assertNotNull(result); - Assertions.assertTrue(result.size() > 0); - boolean found = false; - for (String database : result) { - if (database.equals(dbName)) { - found = true; - break; - } - - } - Assertions.assertTrue(found, "It is expected that describeDataBases contents the newly create database."); - this.influxDB.deleteDatabase(dbName); - } - - /** - * Test that Database exists works. - */ - @Test - public void testDatabaseExists() { - String existentdbName = "unittest_1"; - String notExistentdbName = "unittest_2"; - this.influxDB.createDatabase(existentdbName); - boolean checkDbExistence = this.influxDB.databaseExists(existentdbName); - Assertions.assertTrue(checkDbExistence, "It is expected that databaseExists return true for " + existentdbName + " database"); - checkDbExistence = this.influxDB.databaseExists(notExistentdbName); - Assertions.assertFalse(checkDbExistence, "It is expected that databaseExists return false for " + notExistentdbName + " database"); - this.influxDB.deleteDatabase(existentdbName); - } - - /** - * Test that writing to the new lineprotocol. 
- */ - @Test - public void testWrite() { - String dbName = "write_unittest_" + System.currentTimeMillis(); - this.influxDB.createDatabase(dbName); - String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); - BatchPoints batchPoints = BatchPoints.database(dbName).tag("async", "true").retentionPolicy(rp).build(); - Point point1 = Point - .measurement("cpu") - .tag("atag", "test") - .addField("idle", 90L) - .addField("usertime", 9L) - .addField("system", 1L) - .build(); - Point point2 = Point.measurement("disk").tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); - batchPoints.point(point1); - batchPoints.point(point2); - this.influxDB.write(batchPoints); - Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - this.influxDB.deleteDatabase(dbName); - } - - /** - * Test the implementation of {@link InfluxDB#write(int, Point)}'s sync support. - */ - @Test - public void testSyncWritePointThroughUDP() throws InterruptedException { - this.influxDB.disableBatch(); - String measurement = TestUtils.getRandomMeasurement(); - Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); - this.influxDB.write(UDP_PORT, point); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - } - - /** - * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. 
- */ - @Test - public void testAsyncWritePointThroughUDP() throws InterruptedException { - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - try{ - Assertions.assertTrue(this.influxDB.isBatchEnabled()); - String measurement = TestUtils.getRandomMeasurement(); - Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); - this.influxDB.write(UDP_PORT, point); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - }finally{ - this.influxDB.disableBatch(); - } - } - - - /** - * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. + System.out.println("# Connected to InfluxDB Version: " + this.influxDB.version() + " #"); + System.out.println("##################################################################################"); + } + + /** + * delete UDP database after all tests end. + */ + @AfterEach + public void cleanup() { + this.influxDB.deleteDatabase(UDP_DATABASE); + } + + /** + * Test for a ping. + */ + @Test + public void testPing() { + Pong result = this.influxDB.ping(); + Assertions.assertNotNull(result); + Assertions.assertNotEquals(result.getVersion(), "unknown"); + } + + /** + * Test that version works. + */ + @Test + public void testVersion() { + String version = this.influxDB.version(); + Assertions.assertNotNull(version); + Assertions.assertFalse(version.contains("unknown")); + } + + /** + * Simple Test for a query. + */ + @Test + public void testQuery() { + this.influxDB.query(new Query("CREATE DATABASE mydb2", "mydb")); + this.influxDB.query(new Query("DROP DATABASE mydb2", "mydb")); + } + + /** + * Tests for callback query. 
+ */ + @Test + public void testCallbackQuery() throws Throwable { + final AsyncResult result = new AsyncResult<>(); + final Consumer firstQueryConsumer = new Consumer() { + @Override + public void accept(QueryResult queryResult) { + influxDB.query(new Query("DROP DATABASE mydb2", "mydb"), result.resultConsumer, result.errorConsumer); + } + }; + + this.influxDB.query(new Query("CREATE DATABASE mydb2", "mydb"), firstQueryConsumer, result.errorConsumer); + + // Will throw exception in case of error. + result.result(); + } + + /** + * Test that describe Databases works. + */ + @Test + public void testDescribeDatabases() { + String dbName = "unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + this.influxDB.describeDatabases(); + List result = this.influxDB.describeDatabases(); + Assertions.assertNotNull(result); + Assertions.assertTrue(result.size() > 0); + boolean found = false; + for (String database : result) { + if (database.equals(dbName)) { + found = true; + break; + } + + } + Assertions.assertTrue(found, "It is expected that describeDataBases contents the newly create database."); + this.influxDB.deleteDatabase(dbName); + } + + /** + * Test that Database exists works. + */ + @Test + public void testDatabaseExists() { + String existentdbName = "unittest_1"; + String notExistentdbName = "unittest_2"; + this.influxDB.createDatabase(existentdbName); + boolean checkDbExistence = this.influxDB.databaseExists(existentdbName); + Assertions.assertTrue(checkDbExistence, "It is expected that databaseExists return true for " + existentdbName + " database"); + checkDbExistence = this.influxDB.databaseExists(notExistentdbName); + Assertions.assertFalse(checkDbExistence, "It is expected that databaseExists return false for " + notExistentdbName + " database"); + this.influxDB.deleteDatabase(existentdbName); + } + + /** + * Test that writing to the new lineprotocol. 
+ */ + @Test + public void testWrite() { + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + BatchPoints batchPoints = BatchPoints.database(dbName).tag("async", "true").retentionPolicy(rp).build(); + Point point1 = Point + .measurement("cpu") + .tag("atag", "test") + .addField("idle", 90L) + .addField("usertime", 9L) + .addField("system", 1L) + .build(); + Point point2 = Point.measurement("disk").tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); + batchPoints.point(point1); + batchPoints.point(point2); + this.influxDB.write(batchPoints); + Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); + QueryResult result = this.influxDB.query(query); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + this.influxDB.deleteDatabase(dbName); + } + + /** + * Test the implementation of {@link InfluxDB#write(int, Point)}'s sync + * support. + */ + @Test + public void testSyncWritePointThroughUDP() throws InterruptedException { + this.influxDB.disableBatch(); + String measurement = TestUtils.getRandomMeasurement(); + Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); + this.influxDB.write(UDP_PORT, point); + Thread.sleep(2000); + Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); + QueryResult result = this.influxDB.query(query); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + } + + /** + * Test the implementation of {@link InfluxDB#write(int, Point)}'s async + * support. 
+ */ + @Test + public void testAsyncWritePointThroughUDP() throws InterruptedException { + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); + try { + Assertions.assertTrue(this.influxDB.isBatchEnabled()); + String measurement = TestUtils.getRandomMeasurement(); + Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); + this.influxDB.write(UDP_PORT, point); + Thread.sleep(2000); + Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); + QueryResult result = this.influxDB.query(query); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + } finally { + this.influxDB.disableBatch(); + } + } + + /** + * Test the implementation of {@link InfluxDB#write(int, Point)}'s async + * support. */ @Test public void testAsyncWritePointThroughUDPFail() { this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - try{ + try { Assertions.assertTrue(this.influxDB.isBatchEnabled()); String measurement = TestUtils.getRandomMeasurement(); Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); Thread.currentThread().interrupt(); Assertions.assertThrows(RuntimeException.class, () -> { - this.influxDB.write(UDP_PORT, point); - }); - }finally{ + this.influxDB.write(UDP_PORT, point); + }); + } finally { this.influxDB.disableBatch(); } } @@ -258,22 +261,23 @@ public void testWriteStringData() { this.influxDB.deleteDatabase(dbName); } - /** - * Test writing to the database using string protocol with simpler interface. 
- */ - @Test - public void testWriteStringDataSimple() { - String dbName = "write_unittest_" + System.currentTimeMillis(); - this.influxDB.createDatabase(dbName); - String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); - this.influxDB.setDatabase(dbName); - this.influxDB.setRetentionPolicy(rp); - this.influxDB.write("cpu,atag=test idle=90,usertime=9,system=1"); - Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - this.influxDB.deleteDatabase(dbName); - } + /** + * Test writing to the database using string protocol with simpler + * interface. + */ + @Test + public void testWriteStringDataSimple() { + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + this.influxDB.setDatabase(dbName); + this.influxDB.setRetentionPolicy(rp); + this.influxDB.write("cpu,atag=test idle=90,usertime=9,system=1"); + Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); + QueryResult result = this.influxDB.query(query); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + this.influxDB.deleteDatabase(dbName); + } /** * Test writing to the database using string protocol through UDP. @@ -290,14 +294,15 @@ public void testWriteStringDataThroughUDP() throws InterruptedException { } /** - * Test writing multiple records to the database using string protocol through UDP. + * Test writing multiple records to the database using string protocol + * through UDP. 
*/ @Test public void testWriteMultipleStringDataThroughUDP() throws InterruptedException { String measurement = TestUtils.getRandomMeasurement(); - this.influxDB.write(UDP_PORT, measurement + ",atag=test1 idle=100,usertime=10,system=1\n" + - measurement + ",atag=test2 idle=200,usertime=20,system=2\n" + - measurement + ",atag=test3 idle=300,usertime=30,system=3"); + this.influxDB.write(UDP_PORT, measurement + ",atag=test1 idle=100,usertime=10,system=1\n" + + measurement + ",atag=test2 idle=200,usertime=20,system=2\n" + + measurement + ",atag=test3 idle=300,usertime=30,system=3"); Thread.sleep(2000); Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); QueryResult result = this.influxDB.query(query); @@ -309,7 +314,8 @@ public void testWriteMultipleStringDataThroughUDP() throws InterruptedException } /** - * Test writing multiple separate records to the database using string protocol through UDP. + * Test writing multiple separate records to the database using string + * protocol through UDP. 
*/ @Test public void testWriteMultipleStringDataLinesThroughUDP() throws InterruptedException { @@ -330,9 +336,11 @@ public void testWriteMultipleStringDataLinesThroughUDP() throws InterruptedExcep } /** - * When batch of points' size is over UDP limit, the expected exception - * is java.lang.RuntimeException: java.net.SocketException: - * The message is larger than the maximum supported by the underlying transport: Datagram send failed + * When batch of points' size is over UDP limit, the expected exception is + * java.lang.RuntimeException: java.net.SocketException: The message is + * larger than the maximum supported by the underlying transport: Datagram + * send failed + * * @throws Exception */ @Test @@ -341,19 +349,19 @@ public void testWriteMultipleStringDataLinesOverUDPLimit() throws Exception { List lineProtocols = new ArrayList(); int i = 0; int length = 0; - while ( true ) { + while (true) { Point point = Point.measurement("udp_single_poit").addField("v", i).build(); String lineProtocol = point.lineProtocol(); length += (lineProtocol.getBytes("utf-8")).length; lineProtocols.add(lineProtocol); - if( length > 65535 ){ + if (length > 65535) { break; } } //write batch of string which size is over 64K Assertions.assertThrows(RuntimeException.class, () -> { - this.influxDB.write(UDP_PORT, lineProtocols); - }); + this.influxDB.write(UDP_PORT, lineProtocols); + }); } /** @@ -376,30 +384,32 @@ public void testWriteMultipleStringData() { this.influxDB.deleteDatabase(dbName); } - /** - * Test writing multiple records to the database using string protocol with simpler interface. 
- */ - @Test - public void testWriteMultipleStringDataSimple() { - String dbName = "write_unittest_" + System.currentTimeMillis(); - this.influxDB.createDatabase(dbName); - String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); - this.influxDB.setDatabase(dbName); - this.influxDB.setRetentionPolicy(rp); + /** + * Test writing multiple records to the database using string protocol with + * simpler interface. + */ + @Test + public void testWriteMultipleStringDataSimple() { + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + this.influxDB.setDatabase(dbName); + this.influxDB.setRetentionPolicy(rp); - this.influxDB.write("cpu,atag=test1 idle=100,usertime=10,system=1\ncpu,atag=test2 idle=200,usertime=20,system=2\ncpu,atag=test3 idle=300,usertime=30,system=3"); - Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); - QueryResult result = this.influxDB.query(query); + this.influxDB.write("cpu,atag=test1 idle=100,usertime=10,system=1\ncpu,atag=test2 idle=200,usertime=20,system=2\ncpu,atag=test3 idle=300,usertime=30,system=3"); + Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); + QueryResult result = this.influxDB.query(query); - Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); - Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); - Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); - this.influxDB.deleteDatabase(dbName); - } + Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", 
result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); + this.influxDB.deleteDatabase(dbName); + } /** - * Test writing multiple separate records to the database using string protocol. + * Test writing multiple separate records to the database using string + * protocol. */ @Test public void testWriteMultipleStringDataLines() { @@ -422,54 +432,56 @@ public void testWriteMultipleStringDataLines() { this.influxDB.deleteDatabase(dbName); } - /** - * Test writing multiple separate records to the database using string protocol with simpler interface. - */ - @Test - public void testWriteMultipleStringDataLinesSimple() { - String dbName = "write_unittest_" + System.currentTimeMillis(); - this.influxDB.createDatabase(dbName); - String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); - this.influxDB.setDatabase(dbName); - this.influxDB.setRetentionPolicy(rp); - - this.influxDB.write(Arrays.asList( - "cpu,atag=test1 idle=100,usertime=10,system=1", - "cpu,atag=test2 idle=200,usertime=20,system=2", - "cpu,atag=test3 idle=300,usertime=30,system=3" - )); - Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); - QueryResult result = this.influxDB.query(query); - - Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); - Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); - Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); - Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); - this.influxDB.deleteDatabase(dbName); - } - - /** - * Test that creating database which name is composed of numbers only works - */ - @Test - public void testCreateNumericNamedDatabase() { - String numericDbName = "123"; - - this.influxDB.createDatabase(numericDbName); - List result = 
this.influxDB.describeDatabases(); - Assertions.assertTrue(result.contains(numericDbName)); - this.influxDB.deleteDatabase(numericDbName); - } - - /** - * Test that creating database which name is empty will throw expected exception + /** + * Test writing multiple separate records to the database using string + * protocol with simpler interface. + */ + @Test + public void testWriteMultipleStringDataLinesSimple() { + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + this.influxDB.setDatabase(dbName); + this.influxDB.setRetentionPolicy(rp); + + this.influxDB.write(Arrays.asList( + "cpu,atag=test1 idle=100,usertime=10,system=1", + "cpu,atag=test2 idle=200,usertime=20,system=2", + "cpu,atag=test3 idle=300,usertime=30,system=3" + )); + Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); + QueryResult result = this.influxDB.query(query); + + Assertions.assertEquals(result.getResults().get(0).getSeries().size(), 3); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); + this.influxDB.deleteDatabase(dbName); + } + + /** + * Test that creating database which name is composed of numbers only works + */ + @Test + public void testCreateNumericNamedDatabase() { + String numericDbName = "123"; + + this.influxDB.createDatabase(numericDbName); + List result = this.influxDB.describeDatabases(); + Assertions.assertTrue(result.contains(numericDbName)); + this.influxDB.deleteDatabase(numericDbName); + } + + /** + * Test that creating database which name is empty will throw expected + * exception */ @Test public void testCreateEmptyNamedDatabase() { String emptyName = ""; 
Assertions.assertThrows(IllegalArgumentException.class, () -> { - this.influxDB.createDatabase(emptyName); - }); + this.influxDB.createDatabase(emptyName); + }); } /** @@ -487,89 +499,92 @@ public void testCreateDatabaseWithNameContainHyphen() { } } - /** - * Test the implementation of {@link InfluxDB#isBatchEnabled()}. - */ - @Test - public void testIsBatchEnabled() { - Assertions.assertFalse(this.influxDB.isBatchEnabled()); - - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - Assertions.assertTrue(this.influxDB.isBatchEnabled()); - - this.influxDB.disableBatch(); - Assertions.assertFalse(this.influxDB.isBatchEnabled()); - } - - /** - * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. - */ - @Test - public void testBatchEnabledWithThreadFactory() { - final String threadName = "async_influxdb_write"; - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, new ThreadFactory() { - - @Override - public Thread newThread(Runnable r) { - Thread thread = new Thread(r); - thread.setName(threadName); - return thread; - } - }); - Set threads = Thread.getAllStackTraces().keySet(); - boolean existThreadWithSettedName = false; - for(Thread thread: threads){ - if(thread.getName().equalsIgnoreCase(threadName)){ - existThreadWithSettedName = true; - break; - } - - } - Assertions.assertTrue(existThreadWithSettedName); - this.influxDB.disableBatch(); - } - - @Test - public void testBatchEnabledWithThreadFactoryIsNull() { - Assertions.assertThrows(NullPointerException.class, () -> { - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, null); - }); - } - - /** - * Test the implementation of {@link InfluxDBImpl#InfluxDBImpl(String, String, String, okhttp3.OkHttpClient.Builder)}. 
- */ - @Test - public void testWrongHostForInfluxdb(){ - String errorHost = "10.224.2.122_error_host"; - Assertions.assertThrows(RuntimeException.class, () -> { - InfluxDBFactory.connect(TestUtils.getInfluxURL()); - }); - } - - @Test - public void testBatchEnabledTwice() { - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - try{ - Assertions.assertThrows(IllegalStateException.class, () -> { - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - }); - } finally { - this.influxDB.disableBatch(); - } - } - - /** - * Test the implementation of {@link InfluxDB#close()}. - */ - @Test - public void testCloseInfluxDBClient() { - InfluxDB influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); - influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - Assertions.assertTrue(influxDB.isBatchEnabled()); - influxDB.close(); - Assertions.assertFalse(influxDB.isBatchEnabled()); - } + /** + * Test the implementation of {@link InfluxDB#isBatchEnabled()}. + */ + @Test + public void testIsBatchEnabled() { + Assertions.assertFalse(this.influxDB.isBatchEnabled()); + + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); + Assertions.assertTrue(this.influxDB.isBatchEnabled()); + + this.influxDB.disableBatch(); + Assertions.assertFalse(this.influxDB.isBatchEnabled()); + } + + /** + * Test the implementation of + * {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. 
+ */ + @Test + public void testBatchEnabledWithThreadFactory() { + final String threadName = "async_influxdb_write"; + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, new ThreadFactory() { + + @Override + public Thread newThread(Runnable r) { + Thread thread = new Thread(r); + thread.setName(threadName); + return thread; + } + }); + Set threads = Thread.getAllStackTraces().keySet(); + boolean existThreadWithSettedName = false; + for (Thread thread : threads) { + if (thread.getName().equalsIgnoreCase(threadName)) { + existThreadWithSettedName = true; + break; + } + + } + Assertions.assertTrue(existThreadWithSettedName); + this.influxDB.disableBatch(); + } + + @Test + public void testBatchEnabledWithThreadFactoryIsNull() { + Assertions.assertThrows(NullPointerException.class, () -> { + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, null); + }); + } + + /** + * Test the implementation of + * {@link InfluxDBImpl#InfluxDBImpl(String, String, String, okhttp3.OkHttpClient.Builder)}. + */ + @Test + public void testWrongHostForInfluxdb() { + String errorHost = "10.224.2.122_error_host"; + Assertions.assertThrows(RuntimeException.class, () -> { + URL uri = new URL(TestUtils.getInfluxURL()); + InfluxDBFactory.connect(uri.getProtocol() + "://" + errorHost + ":" + uri.getPort() + "/" + uri.getPath()); + }); + } + + @Test + public void testBatchEnabledTwice() { + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); + try { + Assertions.assertThrows(IllegalStateException.class, () -> { + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); + }); + } finally { + this.influxDB.disableBatch(); + } + } + + /** + * Test the implementation of {@link InfluxDB#close()}. 
+ */ + @Test + public void testCloseInfluxDBClient() { + InfluxDB influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); + influxDB.enableBatch(1, 1, TimeUnit.SECONDS); + Assertions.assertTrue(influxDB.isBatchEnabled()); + influxDB.close(); + Assertions.assertFalse(influxDB.isBatchEnabled()); + } /** * Test writing multiple separate records to the database by Gzip compress @@ -604,7 +619,8 @@ public void testWriteEnableGzip() { /** * Test the implementation of flag control for gzip such as: - * {@link InfluxDB#disableGzip()}} and {@link InfluxDB#isBatchEnabled()}},etc + * {@link InfluxDB#disableGzip()}} and + * {@link InfluxDB#isBatchEnabled()}},etc */ @Test public void testWriteEnableGzipAndDisableGzip() { @@ -623,6 +639,7 @@ public void testWriteEnableGzipAndDisableGzip() { /** * Test chunking. + * * @throws InterruptedException */ @Test @@ -650,7 +667,8 @@ public void testChunking() throws InterruptedException { @Override public void accept(QueryResult result) { queue.add(result); - }}); + } + }); Thread.sleep(2000); this.influxDB.deleteDatabase(dbName); @@ -673,6 +691,7 @@ public void accept(QueryResult result) { /** * Test chunking edge case. + * * @throws InterruptedException */ @Test @@ -697,6 +716,7 @@ public void accept(QueryResult result) { /** * Test chunking on 0.13 and 1.0. 
+ * * @throws InterruptedException */ @Test() @@ -705,14 +725,14 @@ public void testChunkingOldVersion() throws InterruptedException { if (this.influxDB.version().startsWith("0.") || this.influxDB.version().startsWith("1.0")) { Assertions.assertThrows(RuntimeException.class, () -> { - String dbName = "write_unittest_" + System.currentTimeMillis(); - Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); - this.influxDB.query(query, 10, new Consumer() { - @Override - public void accept(QueryResult result) { - } - }); - }); + String dbName = "write_unittest_" + System.currentTimeMillis(); + Query query = new Query("SELECT * FROM cpu GROUP BY *", dbName); + this.influxDB.query(query, 10, new Consumer() { + @Override + public void accept(QueryResult result) { + } + }); + }); } } @@ -743,38 +763,38 @@ public void testFlushPendingWritesWhenBatchingEnabled() { public void testFlushThrowsIfBatchingIsNotEnabled() { Assertions.assertFalse(this.influxDB.isBatchEnabled()); Assertions.assertThrows(IllegalStateException.class, () -> { - this.influxDB.flush(); - }); - } - - /** - * Test creation and deletion of retention policies - */ - @Test - public void testCreateDropRetentionPolicies() { - String dbName = "rpTest_" + System.currentTimeMillis(); - this.influxDB.createDatabase(dbName); - - this.influxDB.createRetentionPolicy("testRP1", dbName, "30h", 2, false); - this.influxDB.createRetentionPolicy("testRP2", dbName, "10d", "20m", 2, false); - this.influxDB.createRetentionPolicy("testRP3", dbName, "2d4w", "20m", 2); - - Query query = new Query("SHOW RETENTION POLICIES", dbName); - QueryResult result = this.influxDB.query(query); - Assertions.assertNull(result.getError()); - List> retentionPolicies = result.getResults().get(0).getSeries().get(0).getValues(); - Assertions.assertTrue(retentionPolicies.get(1).contains("testRP1")); - Assertions.assertTrue(retentionPolicies.get(2).contains("testRP2")); - Assertions.assertTrue(retentionPolicies.get(3).contains("testRP3")); - 
- this.influxDB.dropRetentionPolicy("testRP1", dbName); - this.influxDB.dropRetentionPolicy("testRP2", dbName); - this.influxDB.dropRetentionPolicy("testRP3", dbName); - - result = this.influxDB.query(query); - Assertions.assertNull(result.getError()); - retentionPolicies = result.getResults().get(0).getSeries().get(0).getValues(); - Assertions.assertTrue(retentionPolicies.size() == 1); - } + this.influxDB.flush(); + }); + } + + /** + * Test creation and deletion of retention policies + */ + @Test + public void testCreateDropRetentionPolicies() { + String dbName = "rpTest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + this.influxDB.createRetentionPolicy("testRP1", dbName, "30h", 2, false); + this.influxDB.createRetentionPolicy("testRP2", dbName, "10d", "20m", 2, false); + this.influxDB.createRetentionPolicy("testRP3", dbName, "2d4w", "20m", 2); + + Query query = new Query("SHOW RETENTION POLICIES", dbName); + QueryResult result = this.influxDB.query(query); + Assertions.assertNull(result.getError()); + List> retentionPolicies = result.getResults().get(0).getSeries().get(0).getValues(); + Assertions.assertTrue(retentionPolicies.get(1).contains("testRP1")); + Assertions.assertTrue(retentionPolicies.get(2).contains("testRP2")); + Assertions.assertTrue(retentionPolicies.get(3).contains("testRP3")); + + this.influxDB.dropRetentionPolicy("testRP1", dbName); + this.influxDB.dropRetentionPolicy("testRP2", dbName); + this.influxDB.dropRetentionPolicy("testRP3", dbName); + + result = this.influxDB.query(query); + Assertions.assertNull(result.getError()); + retentionPolicies = result.getResults().get(0).getSeries().get(0).getValues(); + Assertions.assertTrue(retentionPolicies.size() == 1); + } } From 7cb823b8326502cf3b61769467e5d6c7c472f87d Mon Sep 17 00:00:00 2001 From: Artur Keska Date: Tue, 19 Dec 2017 22:30:39 +0100 Subject: [PATCH 092/745] chore: restored correct version number --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/pom.xml b/pom.xml index 61110c5c5..bd92d18d7 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.9.xnet + 2.9-SNAPSHOT influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org From 2887f8f0278c4be56fd76055b413ee55359f0100 Mon Sep 17 00:00:00 2001 From: Artur Keska Date: Wed, 20 Dec 2017 00:09:00 +0100 Subject: [PATCH 093/745] test: added tests for testing connectivity over nginx as a proxy - changed compile-and-test.sh scripts, so it starts both standard and "proxy" - added simple nginx config for docker container - tests related to UDP communication moved to separate class and skipped in case of http proxy testing --- compile-and-test.sh | 121 +++-- pom.xml | 511 +++++++++--------- src/test/java/org/influxdb/InfluxDBTest.java | 147 +---- .../java/org/influxdb/UDPInfluxDBTest.java | 214 ++++++++ src/test/nginx/nginx.conf | 56 ++ 5 files changed, 597 insertions(+), 452 deletions(-) create mode 100644 src/test/java/org/influxdb/UDPInfluxDBTest.java create mode 100644 src/test/nginx/nginx.conf diff --git a/compile-and-test.sh b/compile-and-test.sh index 88650019e..0c5659519 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -18,24 +18,17 @@ set -e -#USE_PROXY=nginx -#INFLUXDB_VERSIONS="1.4 1.3 1.2 1.1" -#JAVA_VERSIONS="3-jdk-8-alpine 3-jdk-9-slim" -INFLUXDB_VERSIONS="1.4" -JAVA_VERSIONS="3-jdk-8-alpine" +INFLUXDB_VERSIONS="1.4 1.3 1.2 1.1" +JAVA_VERSIONS="3-jdk-8-alpine 3-jdk-9-slim" WORKDIR=/usr/src/mymaven if [ -z "$BUILD_HOME" ] ; then - BUILD_HOME=$PWD -fi - -INFLUXDB_API_URL=http://influxdb:8086 - -if [ "$USE_PROXY" == "nginx" ] ; then - echo Test with Nginx as proxy - INFLUXDB_API_URL=http://nginx:80/influx-api + BUILD_HOME=$PWD + if [ -x /c/Windows/System32/ ] ; then + BUILD_HOME=/$PWD + fi fi if [ -x /c/Windows/System32/ ] ; then @@ -45,47 +38,67 @@ fi echo Using build home: $BUILD_HOME +function run_test { + USE_PROXY=$1 + + INFLUXDB_API_URL=http://influxdb:8086 + if [ 
"$USE_PROXY" == "nginx" ] ; then + echo Test with Nginx as proxy + INFLUXDB_API_URL=http://nginx:8080/influx-api/ + fi + + + for java_version in ${JAVA_VERSIONS} + do + echo "Run tests with maven:${java_version}" + for version in ${INFLUXDB_VERSIONS} + do + echo "Tesing againts influxdb ${version}" + docker kill influxdb || true + docker rm influxdb || true + docker pull influxdb:${version}-alpine || true + docker run \ + --detach \ + --name influxdb \ + --publish 8086:8086 \ + --publish 8089:8089/udp \ + --volume ${BUILD_HOME}/influxdb.conf:/etc/influxdb/influxdb.conf \ + influxdb:${version}-alpine + + if [ "$USE_PROXY" == "nginx" ] ; then + echo Starting Nginx + docker kill nginx || true + docker rm nginx || true + echo ----- STARTING NGINX CONTAINER ----- + docker run \ + --detach \ + --name nginx \ + --publish 8888:8080 \ + --volume ${BUILD_HOME}/src/test/nginx/nginx.conf:/etc/nginx/conf.d/default.conf:ro \ + --link influxdb:influxdb \ + nginx nginx '-g' 'daemon off;' + + NGINX_LINK=--link=nginx + SKIP_TESTS=-DsomeModule.test.excludes="**/*UDPInfluxDBTest*" + fi + + docker run -it --rm \ + --volume $BUILD_HOME:/usr/src/mymaven \ + --volume $BUILD_HOME/.m2:/root/.m2 \ + --workdir $WORKDIR \ + --link=influxdb $NGINX_LINK \ + --env INFLUXDB_API_URL=$INFLUXDB_API_URL \ + maven:${java_version} mvn clean install $SKIP_TESTS -for java_version in ${JAVA_VERSIONS} -do - echo "Run tests with maven:${java_version}" -for version in ${INFLUXDB_VERSIONS} -do - echo "Tesing againts influxdb ${version}" - docker kill influxdb || true - docker rm influxdb || true - docker pull influxdb:${version}-alpine || true - docker run \ - --detach \ - --name influxdb \ - --publish 8086:8086 \ - --publish 8089:8089/udp \ - --volume ${BUILD_HOME}/influxdb.conf:/etc/influxdb/influxdb.conf \ - influxdb:${version}-alpine + docker kill influxdb || true + docker kill nginx || true + docker rm -f nginx || true + done + done +} - if [ "$USE_PROXY" == "nginx" ] ; then - echo Starting Nginx - docker 
rm -f nginx || true - docker run \ - --detach \ - --name nginx \ - --publish 8888:80 \ - --volume ${BUILD_HOME}/src/test/nginx/nginx.conf:/etc/nginx/nginx.conf:ro \ - --link influxdb:influxdb \ - nginx nginx-debug '-g' 'daemon off;' - NGINX_LINK=--link=nginx - fi - - docker run -it --rm \ - --volume $BUILD_HOME:/usr/src/mymaven \ - --volume $BUILD_HOME/.m2:/root/.m2 \ - --workdir $WORKDIR \ - --link=influxdb $NGINX_LINK \ - --env INFLUXDB_API_URL=$INFLUXDB_API_URL \ - maven:${java_version} mvn clean install +################################################################################ +################################################################################ - docker kill influxdb || true - docker kill nginx || true - docker rm -f nginx || true -done -done +run_test +run_test nginx diff --git a/pom.xml b/pom.xml index bd92d18d7..847cb7c0b 100644 --- a/pom.xml +++ b/pom.xml @@ -1,265 +1,270 @@ - 4.0.0 - org.influxdb - influxdb-java - jar - 2.9-SNAPSHOT - influxdb java bindings - Java API to access the InfluxDB REST API - http://www.influxdb.org + 4.0.0 + org.influxdb + influxdb-java + jar + 2.9-SNAPSHOT + influxdb java bindings + Java API to access the InfluxDB REST API + http://www.influxdb.org - - 3.2.1 - + + 3.2.1 + - - - The MIT License (MIT) - http://www.opensource.org/licenses/mit-license.php - repo - - - - UTF-8 - + + + The MIT License (MIT) + http://www.opensource.org/licenses/mit-license.php + repo + + + + UTF-8 + - - scm:git:git@github.com:influxdata/influxdb-java.git - scm:git:git@github.com:influxdata/influxdb-java.git - git@github.com:influxdata/influxdb-java.git - + + scm:git:git@github.com:influxdata/influxdb-java.git + scm:git:git@github.com:influxdata/influxdb-java.git + git@github.com:influxdata/influxdb-java.git + - - - majst01 - Stefan Majer - stefan.majer@gmail.com - - - - - - org.codehaus.mojo - findbugs-maven-plugin - 3.0.5 - - true - - target/site - - - - + + + majst01 + Stefan Majer + stefan.majer@gmail.com + + + + + + 
org.codehaus.mojo + findbugs-maven-plugin + 3.0.5 + + true + + target/site + + + + - - - ossrh - https://oss.sonatype.org/content/repositories/snapshots - - - ossrh - https://oss.sonatype.org/service/local/staging/deploy/maven2/ - - + + + ossrh + https://oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://oss.sonatype.org/service/local/staging/deploy/maven2/ + + - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.7.0 - - 1.8 - 1.8 - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - org.apache.maven.plugins - maven-site-plugin - 3.6 - - - org.apache.maven.plugins - maven-clean-plugin - 3.0.0 - - - org.apache.maven.plugins - maven-deploy-plugin - 2.8.2 - - - org.apache.maven.plugins - maven-install-plugin - 2.5.2 - - - org.apache.maven.plugins - maven-jar-plugin - 3.0.2 - - - org.apache.maven.plugins - maven-resources-plugin - 3.0.1 - - - - + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.7.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + ${someModule.test.excludes} + + + + + org.apache.maven.plugins + maven-site-plugin + 3.6 + + + org.apache.maven.plugins + maven-clean-plugin + 3.0.0 + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + org.apache.maven.plugins + maven-install-plugin + 2.5.2 + + + org.apache.maven.plugins + maven-jar-plugin + 3.0.2 + + + org.apache.maven.plugins + maven-resources-plugin + 3.0.1 + + + + - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.8 - true - - ossrh - https://oss.sonatype.org/ - true - - - - org.apache.maven.plugins - maven-source-plugin - 3.0.1 - - - attach-sources - - jar-no-fork - - - - + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + true + + ossrh + https://oss.sonatype.org/ + true + + + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + attach-sources + + jar-no-fork + + + + - - org.apache.maven.plugins - maven-javadoc-plugin - 3.0.0-M1 - - - attach-javadocs - - jar - - - - - - 
- org.jacoco - jacoco-maven-plugin - 0.7.9 - - - - prepare-agent - - - - report - test - - report - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 2.17 - - true - checkstyle.xml - true - - - - verify - - checkstyle - - - - - - - - - org.junit.jupiter - junit-jupiter-engine - 5.0.2 - test - - - org.junit.platform - junit-platform-runner - 1.0.2 - test - - - org.hamcrest - hamcrest-all - 1.3 - test - - - org.assertj - assertj-core - 3.8.0 - test - - - org.mockito - mockito-core - 2.12.0 - test - - - com.squareup.retrofit2 - retrofit - 2.3.0 - - - com.squareup.retrofit2 - converter-moshi - 2.3.0 - - - - com.squareup.okhttp3 - okhttp - 3.9.1 - - - com.squareup.okhttp3 - logging-interceptor - 3.9.1 - - + + org.apache.maven.plugins + maven-javadoc-plugin + 3.0.0-M1 + + + attach-javadocs + + jar + + + + + + + org.jacoco + jacoco-maven-plugin + 0.7.9 + + + + prepare-agent + + + + report + test + + report + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 2.17 + + true + checkstyle.xml + true + + + + verify + + checkstyle + + + + + + + + + org.junit.jupiter + junit-jupiter-engine + 5.0.2 + test + + + org.junit.platform + junit-platform-runner + 1.0.2 + test + + + org.hamcrest + hamcrest-all + 1.3 + test + + + org.assertj + assertj-core + 3.8.0 + test + + + org.mockito + mockito-core + 2.12.0 + test + + + com.squareup.retrofit2 + retrofit + 2.3.0 + + + com.squareup.retrofit2 + converter-moshi + 2.3.0 + + + + com.squareup.okhttp3 + okhttp + 3.9.1 + + + com.squareup.okhttp3 + logging-interceptor + 3.9.1 + + diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 693494ea3..ce4cea1a3 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -2,7 +2,6 @@ import java.io.IOException; import java.net.URL; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; @@ -36,7 +35,6 @@ public class InfluxDBTest 
{ private InfluxDB influxDB; - private final static int UDP_PORT = 8089; private final static String UDP_DATABASE = "udp"; /** @@ -188,64 +186,7 @@ public void testWrite() { Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); this.influxDB.deleteDatabase(dbName); } - - /** - * Test the implementation of {@link InfluxDB#write(int, Point)}'s sync - * support. - */ - @Test - public void testSyncWritePointThroughUDP() throws InterruptedException { - this.influxDB.disableBatch(); - String measurement = TestUtils.getRandomMeasurement(); - Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); - this.influxDB.write(UDP_PORT, point); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - } - - /** - * Test the implementation of {@link InfluxDB#write(int, Point)}'s async - * support. - */ - @Test - public void testAsyncWritePointThroughUDP() throws InterruptedException { - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - try { - Assertions.assertTrue(this.influxDB.isBatchEnabled()); - String measurement = TestUtils.getRandomMeasurement(); - Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); - this.influxDB.write(UDP_PORT, point); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - } finally { - this.influxDB.disableBatch(); - } - } - - /** - * Test the implementation of {@link InfluxDB#write(int, Point)}'s async - * support. 
- */ - @Test - public void testAsyncWritePointThroughUDPFail() { - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - try { - Assertions.assertTrue(this.influxDB.isBatchEnabled()); - String measurement = TestUtils.getRandomMeasurement(); - Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); - Thread.currentThread().interrupt(); - Assertions.assertThrows(RuntimeException.class, () -> { - this.influxDB.write(UDP_PORT, point); - }); - } finally { - this.influxDB.disableBatch(); - } - } - + /** * Test writing to the database using string protocol. */ @@ -279,91 +220,7 @@ public void testWriteStringDataSimple() { this.influxDB.deleteDatabase(dbName); } - /** - * Test writing to the database using string protocol through UDP. - */ - @Test - public void testWriteStringDataThroughUDP() throws InterruptedException { - String measurement = TestUtils.getRandomMeasurement(); - this.influxDB.write(UDP_PORT, measurement + ",atag=test idle=90,usertime=9,system=1"); - //write with UDP may be executed on server after query with HTTP. so sleep 2s to handle this case - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - } - - /** - * Test writing multiple records to the database using string protocol - * through UDP. 
- */ - @Test - public void testWriteMultipleStringDataThroughUDP() throws InterruptedException { - String measurement = TestUtils.getRandomMeasurement(); - this.influxDB.write(UDP_PORT, measurement + ",atag=test1 idle=100,usertime=10,system=1\n" - + measurement + ",atag=test2 idle=200,usertime=20,system=2\n" - + measurement + ",atag=test3 idle=300,usertime=30,system=3"); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - - Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); - Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); - Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); - Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); - } - - /** - * Test writing multiple separate records to the database using string - * protocol through UDP. 
- */ - @Test - public void testWriteMultipleStringDataLinesThroughUDP() throws InterruptedException { - String measurement = TestUtils.getRandomMeasurement(); - this.influxDB.write(UDP_PORT, Arrays.asList( - measurement + ",atag=test1 idle=100,usertime=10,system=1", - measurement + ",atag=test2 idle=200,usertime=20,system=2", - measurement + ",atag=test3 idle=300,usertime=30,system=3" - )); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - - Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); - Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); - Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); - Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); - } - - /** - * When batch of points' size is over UDP limit, the expected exception is - * java.lang.RuntimeException: java.net.SocketException: The message is - * larger than the maximum supported by the underlying transport: Datagram - * send failed - * - * @throws Exception - */ - @Test - public void testWriteMultipleStringDataLinesOverUDPLimit() throws Exception { - //prepare data - List lineProtocols = new ArrayList(); - int i = 0; - int length = 0; - while (true) { - Point point = Point.measurement("udp_single_poit").addField("v", i).build(); - String lineProtocol = point.lineProtocol(); - length += (lineProtocol.getBytes("utf-8")).length; - lineProtocols.add(lineProtocol); - if (length > 65535) { - break; - } - } - //write batch of string which size is over 64K - Assertions.assertThrows(RuntimeException.class, () -> { - this.influxDB.write(UDP_PORT, lineProtocols); - }); - } - + /** * Test writing multiple records to the database using string protocol. 
*/ diff --git a/src/test/java/org/influxdb/UDPInfluxDBTest.java b/src/test/java/org/influxdb/UDPInfluxDBTest.java new file mode 100644 index 000000000..a0ff74c62 --- /dev/null +++ b/src/test/java/org/influxdb/UDPInfluxDBTest.java @@ -0,0 +1,214 @@ +package org.influxdb; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.influxdb.InfluxDB.LogLevel; +import org.influxdb.dto.Point; +import org.influxdb.dto.Pong; +import org.influxdb.dto.Query; +import org.influxdb.dto.QueryResult; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; + +/** + * Test the InfluxDB API. + * + * @author stefan.majer [at] gmail.com + * + */ +@RunWith(JUnitPlatform.class) +public class UDPInfluxDBTest { + + private InfluxDB influxDB; + private final static int UDP_PORT = 8089; + private final static String UDP_DATABASE = "udp"; + + /** + * Create a influxDB connection before all tests start. 
+ * + * @throws InterruptedException + * @throws IOException + */ + @BeforeEach + public void setUp() throws InterruptedException, IOException { + this.influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); + boolean influxDBstarted = false; + do { + Pong response; + try { + response = this.influxDB.ping(); + if (!response.getVersion().equalsIgnoreCase("unknown")) { + influxDBstarted = true; + } + } catch (Exception e) { + // NOOP intentional + e.printStackTrace(); + } + Thread.sleep(100L); + } while (!influxDBstarted); + this.influxDB.setLogLevel(LogLevel.NONE); + this.influxDB.createDatabase(UDP_DATABASE); + System.out.println("################################################################################## "); + System.out.println("# Connected to InfluxDB Version: " + this.influxDB.version() + " #"); + System.out.println("##################################################################################"); + } + + /** + * delete UDP database after all tests end. + */ + @AfterEach + public void cleanup() { + this.influxDB.deleteDatabase(UDP_DATABASE); + } + + /** + * Test the implementation of {@link InfluxDB#write(int, Point)}'s sync + * support. + */ + @Test + public void testSyncWritePointThroughUDP() throws InterruptedException { + this.influxDB.disableBatch(); + String measurement = TestUtils.getRandomMeasurement(); + Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); + this.influxDB.write(UDP_PORT, point); + Thread.sleep(2000); + Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); + QueryResult result = this.influxDB.query(query); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + } + + /** + * Test the implementation of {@link InfluxDB#write(int, Point)}'s async + * support. 
+ */ + @Test + public void testAsyncWritePointThroughUDP() throws InterruptedException { + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); + try { + Assertions.assertTrue(this.influxDB.isBatchEnabled()); + String measurement = TestUtils.getRandomMeasurement(); + Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); + this.influxDB.write(UDP_PORT, point); + Thread.sleep(2000); + Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); + QueryResult result = this.influxDB.query(query); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + } finally { + this.influxDB.disableBatch(); + } + } + + /** + * Test the implementation of {@link InfluxDB#write(int, Point)}'s async + * support. + */ + @Test + public void testAsyncWritePointThroughUDPFail() { + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); + try { + Assertions.assertTrue(this.influxDB.isBatchEnabled()); + String measurement = TestUtils.getRandomMeasurement(); + Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); + Thread.currentThread().interrupt(); + Assertions.assertThrows(RuntimeException.class, () -> { + this.influxDB.write(UDP_PORT, point); + }); + } finally { + this.influxDB.disableBatch(); + } + } + + /** + * Test writing to the database using string protocol through UDP. + */ + @Test + public void testWriteStringDataThroughUDP() throws InterruptedException { + String measurement = TestUtils.getRandomMeasurement(); + this.influxDB.write(UDP_PORT, measurement + ",atag=test idle=90,usertime=9,system=1"); + //write with UDP may be executed on server after query with HTTP. 
so sleep 2s to handle this case + Thread.sleep(2000); + Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); + QueryResult result = this.influxDB.query(query); + Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); + } + + + + /** + * When batch of points' size is over UDP limit, the expected exception is + * java.lang.RuntimeException: java.net.SocketException: The message is + * larger than the maximum supported by the underlying transport: Datagram + * send failed + * + * @throws Exception + */ + @Test + public void testWriteMultipleStringDataLinesOverUDPLimit() throws Exception { + //prepare data + List lineProtocols = new ArrayList(); + int i = 0; + int length = 0; + while (true) { + Point point = Point.measurement("udp_single_poit").addField("v", i).build(); + String lineProtocol = point.lineProtocol(); + length += (lineProtocol.getBytes("utf-8")).length; + lineProtocols.add(lineProtocol); + if (length > 65535) { + break; + } + } + //write batch of string which size is over 64K + Assertions.assertThrows(RuntimeException.class, () -> { + this.influxDB.write(UDP_PORT, lineProtocols); + }); + } + + /** + * Test writing multiple records to the database using string protocol + * through UDP. 
+ */ + @Test + public void testWriteMultipleStringDataThroughUDP() throws InterruptedException { + String measurement = TestUtils.getRandomMeasurement(); + this.influxDB.write(UDP_PORT, measurement + ",atag=test1 idle=100,usertime=10,system=1\n" + + measurement + ",atag=test2 idle=200,usertime=20,system=2\n" + + measurement + ",atag=test3 idle=300,usertime=30,system=3"); + Thread.sleep(2000); + Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); + QueryResult result = this.influxDB.query(query); + + Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); + } + + /** + * Test writing multiple separate records to the database using string + * protocol through UDP. 
+ */ + @Test + public void testWriteMultipleStringDataLinesThroughUDP() throws InterruptedException { + String measurement = TestUtils.getRandomMeasurement(); + this.influxDB.write(UDP_PORT, Arrays.asList( + measurement + ",atag=test1 idle=100,usertime=10,system=1", + measurement + ",atag=test2 idle=200,usertime=20,system=2", + measurement + ",atag=test3 idle=300,usertime=30,system=3" + )); + Thread.sleep(2000); + Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); + QueryResult result = this.influxDB.query(query); + + Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); + Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); + Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); + Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); + } +} diff --git a/src/test/nginx/nginx.conf b/src/test/nginx/nginx.conf new file mode 100644 index 000000000..170a7bde2 --- /dev/null +++ b/src/test/nginx/nginx.conf @@ -0,0 +1,56 @@ +server { + listen 8080; + server_name localhost; + + #charset koi8-r; + #access_log /var/log/nginx/host.access.log main; + + location / { + proxy_pass http://influxdb:8086/; + #root /var/www/htmlllll; + #index index.html index.htm; + } + + location /influx-api/ { + proxy_pass http://influxdb:8086/; + proxy_http_version 1.1; + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_redirect off; + error_log /tmp/inluxproxy.debug.log debug; + } + + + #error_page 404 /404.html; + + # redirect server error pages to the static page /50x.html + # + error_page 500 502 503 504 /50x.html; + location = /50x.html { + root /usr/share/nginx/html; + } + + # proxy the PHP scripts to Apache listening on 127.0.0.1:80 + # + #location ~ \.php$ { + # proxy_pass http://127.0.0.1; + #} + + # pass the PHP scripts to FastCGI 
server listening on 127.0.0.1:9000 + # + #location ~ \.php$ { + # root html; + # fastcgi_pass 127.0.0.1:9000; + # fastcgi_index index.php; + # fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name; + # include fastcgi_params; + #} + + # deny access to .htaccess files, if Apache's document root + # concurs with nginx's one + # + #location ~ /\.ht { + # deny all; + #} +} + From 7605a843cfb855d789ebc85fff4ccf2e771fa8f1 Mon Sep 17 00:00:00 2001 From: Fernando Machado Date: Wed, 3 Jan 2018 15:07:08 +0100 Subject: [PATCH 094/745] Added 'deprecated' annotations to javadoc and methods --- src/main/java/org/influxdb/InfluxDB.java | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index 3714eb131..ebc92ccad 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -342,7 +342,10 @@ public void write(final String database, final String retentionPolicy, * * @param name * the name of the new database. + * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized + * CREATE DATABASE query. */ + @Deprecated public void createDatabase(final String name); /** @@ -350,14 +353,20 @@ public void write(final String database, final String retentionPolicy, * * @param name * the name of the database to delete. + * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a + * DROP DATABASE query. */ + @Deprecated public void deleteDatabase(final String name); /** * Describe all available databases. * * @return a List of all Database names. + * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a + * SHOW DATABASES query. */ + @Deprecated public List describeDatabases(); /** @@ -367,6 +376,8 @@ public void write(final String database, final String retentionPolicy, * the name of the database to search. 
* * @return true if the database exists or false if it doesn't exist + * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a + * SHOW DATABASES query and inspect the result. */ public boolean databaseExists(final String name); @@ -418,7 +429,10 @@ public void write(final String database, final String retentionPolicy, * @param shardDuration the shardDuration * @param replicationFactor the replicationFactor of the rp * @param isDefault if the rp is the default rp for the database or not + * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized + * CREATE RETENTION POLICY query. */ + @Deprecated public void createRetentionPolicy(final String rpName, final String database, final String duration, final String shardDuration, final int replicationFactor, final boolean isDefault); @@ -429,7 +443,10 @@ public void createRetentionPolicy(final String rpName, final String database, fi * @param duration the duration of the rp * @param replicationFactor the replicationFactor of the rp * @param isDefault if the rp is the default rp for the database or not + * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized + * CREATE RETENTION POLICY query. */ + @Deprecated public void createRetentionPolicy(final String rpName, final String database, final String duration, final int replicationFactor, final boolean isDefault); @@ -440,7 +457,10 @@ public void createRetentionPolicy(final String rpName, final String database, fi * @param duration the duration of the rp * @param shardDuration the shardDuration * @param replicationFactor the replicationFactor of the rp + * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized + * CREATE RETENTION POLICY query. 
*/ + @Deprecated public void createRetentionPolicy(final String rpName, final String database, final String duration, final String shardDuration, final int replicationFactor); @@ -448,6 +468,9 @@ public void createRetentionPolicy(final String rpName, final String database, fi * Drops a retentionPolicy in a database. * @param rpName the name of the retentionPolicy * @param database the name of the database + * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a + * DROP RETENTION POLICY query. */ + @Deprecated public void dropRetentionPolicy(final String rpName, final String database); } From e6bc0a219378942c9d549ef6b7c82ac057274080 Mon Sep 17 00:00:00 2001 From: Fernando Machado Date: Wed, 3 Jan 2018 15:14:45 +0100 Subject: [PATCH 095/745] Added missing deprecated annotation. --- src/main/java/org/influxdb/InfluxDB.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index ebc92ccad..a5fa5d2d6 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -379,6 +379,7 @@ public void write(final String database, final String retentionPolicy, * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a * SHOW DATABASES query and inspect the result. 
*/ + @Deprecated public boolean databaseExists(final String name); /** From eefd2aba00f9897b5197aab336cae67c8eb955c0 Mon Sep 17 00:00:00 2001 From: Fernando Machado Date: Thu, 4 Jan 2018 13:41:35 +0100 Subject: [PATCH 096/745] added "removed in 3.0" to deprecated annotations --- src/main/java/org/influxdb/InfluxDB.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index a5fa5d2d6..c1359ba23 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -342,7 +342,7 @@ public void write(final String database, final String retentionPolicy, * * @param name * the name of the new database. - * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized + * @deprecated (since 2.9, removed in 3.0) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized * CREATE DATABASE query. */ @Deprecated @@ -353,7 +353,7 @@ public void write(final String database, final String retentionPolicy, * * @param name * the name of the database to delete. - * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a + * @deprecated (since 2.9, removed in 3.0) Use org.influxdb.InfluxDB.query(Query) to execute a * DROP DATABASE query. */ @Deprecated @@ -363,7 +363,7 @@ public void write(final String database, final String retentionPolicy, * Describe all available databases. * * @return a List of all Database names. - * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a + * @deprecated (since 2.9, removed in 3.0) Use org.influxdb.InfluxDB.query(Query) to execute a * SHOW DATABASES query. */ @Deprecated @@ -376,7 +376,7 @@ public void write(final String database, final String retentionPolicy, * the name of the database to search. 
* * @return true if the database exists or false if it doesn't exist - * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a + * @deprecated (since 2.9, removed in 3.0) Use org.influxdb.InfluxDB.query(Query) to execute a * SHOW DATABASES query and inspect the result. */ @Deprecated @@ -430,7 +430,7 @@ public void write(final String database, final String retentionPolicy, * @param shardDuration the shardDuration * @param replicationFactor the replicationFactor of the rp * @param isDefault if the rp is the default rp for the database or not - * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized + * @deprecated (since 2.9, removed in 3.0) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized * CREATE RETENTION POLICY query. */ @Deprecated @@ -444,7 +444,7 @@ public void createRetentionPolicy(final String rpName, final String database, fi * @param duration the duration of the rp * @param replicationFactor the replicationFactor of the rp * @param isDefault if the rp is the default rp for the database or not - * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized + * @deprecated (since 2.9, removed in 3.0) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized * CREATE RETENTION POLICY query. */ @Deprecated @@ -458,7 +458,7 @@ public void createRetentionPolicy(final String rpName, final String database, fi * @param duration the duration of the rp * @param shardDuration the shardDuration * @param replicationFactor the replicationFactor of the rp - * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized + * @deprecated (since 2.9, removed in 3.0) Use org.influxdb.InfluxDB.query(Query) to execute a parameterized * CREATE RETENTION POLICY query. */ @Deprecated @@ -469,7 +469,7 @@ public void createRetentionPolicy(final String rpName, final String database, fi * Drops a retentionPolicy in a database. 
* @param rpName the name of the retentionPolicy * @param database the name of the database - * @deprecated (since 2.9) Use org.influxdb.InfluxDB.query(Query) to execute a + * @deprecated (since 2.9, removed in 3.0) Use org.influxdb.InfluxDB.query(Query) to execute a * DROP RETENTION POLICY query. */ @Deprecated From 4c6757470804ee95e9dd3bfcabf9abba945780b5 Mon Sep 17 00:00:00 2001 From: Eric Goebelbecker Date: Mon, 8 Jan 2018 05:21:06 -0500 Subject: [PATCH 097/745] Add convenience method to Pong for checking ping status. (#403) * Add convenience method to Pong for checking ping status. * Fix checkstyle checks. * Use constant for "unknown." --- src/main/java/org/influxdb/dto/Pong.java | 8 ++++++++ src/test/java/org/influxdb/InfluxDBTest.java | 2 +- src/test/java/org/influxdb/TicketTest.java | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/influxdb/dto/Pong.java b/src/main/java/org/influxdb/dto/Pong.java index 0245a1fde..278633ce1 100644 --- a/src/main/java/org/influxdb/dto/Pong.java +++ b/src/main/java/org/influxdb/dto/Pong.java @@ -9,6 +9,7 @@ public class Pong { private String version; private long responseTime; + private static final String UNKNOWN_VERSION = "unknown"; /** * @return the status @@ -25,6 +26,13 @@ public void setVersion(final String version) { this.version = version; } + /** + * Good or bad connection status. 
+ */ + public boolean isGood() { + return !UNKNOWN_VERSION.equalsIgnoreCase(version); + } + /** * @return the responseTime */ diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index e206ffaa0..3f7351ac1 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -53,7 +53,7 @@ public void setUp() throws InterruptedException, IOException { Pong response; try { response = this.influxDB.ping(); - if (!response.getVersion().equalsIgnoreCase("unknown")) { + if (response.isGood()) { influxDBstarted = true; } } catch (Exception e) { diff --git a/src/test/java/org/influxdb/TicketTest.java b/src/test/java/org/influxdb/TicketTest.java index 828b30a8e..29aecf09e 100644 --- a/src/test/java/org/influxdb/TicketTest.java +++ b/src/test/java/org/influxdb/TicketTest.java @@ -40,7 +40,7 @@ public void setUp() throws InterruptedException, IOException { Pong response; try { response = this.influxDB.ping(); - if (!response.getVersion().equalsIgnoreCase("unknown")) { + if (response.isGood()) { influxDBstarted = true; } } catch (Exception e) { From 2c63292b864315cfdfb950b38a9d50f415fe8468 Mon Sep 17 00:00:00 2001 From: rbkasat Date: Mon, 8 Jan 2018 02:28:54 -0800 Subject: [PATCH 098/745] added consistency configuration for batch processing (#385) * added consistency configuration for batch processing * added test --- src/main/java/org/influxdb/InfluxDB.java | 24 ++++++++ .../org/influxdb/impl/BatchProcessor.java | 45 ++++++++++---- .../java/org/influxdb/impl/InfluxDBImpl.java | 31 ++++++---- src/test/java/org/influxdb/InfluxDBTest.java | 58 +++++++++++-------- .../org/influxdb/impl/BatchProcessorTest.java | 34 +++++++++-- 5 files changed, 141 insertions(+), 51 deletions(-) diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index c1359ba23..deb192373 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ 
b/src/main/java/org/influxdb/InfluxDB.java @@ -134,6 +134,30 @@ public String value() { */ public InfluxDB enableBatch(final int actions, final int flushDuration, final TimeUnit flushDurationTimeUnit, final ThreadFactory threadFactory); + /** + * Enable batching of single Point writes with consistency set for an entire batch + * flushDurations is reached first, a batch write is issued. + * Note that batch processing needs to be explicitly stopped before the application is shutdown. + * To do so call disableBatch(). Default consistency is ONE. + * + * @param actions + * the number of actions to collect + * @param flushDuration + * the time to wait at most. + * @param flushDurationTimeUnit + * the TimeUnit for the given flushDuration. + * @param threadFactory + * a ThreadFactory instance to be used. + * @param exceptionHandler + * a consumer function to handle asynchronous errors + * @param consistency + * a consistency setting for batch writes. + * @return the InfluxDB instance to be able to use it in a fluent manner. + */ + + InfluxDB enableBatch(int actions, int flushDuration, TimeUnit flushDurationTimeUnit, + ThreadFactory threadFactory, BiConsumer, Throwable> exceptionHandler, + ConsistencyLevel consistency); /** * Enable batching of single Point writes to speed up writes significant. 
If either actions or diff --git a/src/main/java/org/influxdb/impl/BatchProcessor.java b/src/main/java/org/influxdb/impl/BatchProcessor.java index 28f973dc9..35457a55c 100644 --- a/src/main/java/org/influxdb/impl/BatchProcessor.java +++ b/src/main/java/org/influxdb/impl/BatchProcessor.java @@ -1,5 +1,10 @@ package org.influxdb.impl; +import org.influxdb.InfluxDB; +import org.influxdb.InfluxDB.ConsistencyLevel; +import org.influxdb.dto.BatchPoints; +import org.influxdb.dto.Point; + import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -16,10 +21,6 @@ import java.util.logging.Level; import java.util.logging.Logger; -import org.influxdb.InfluxDB; -import org.influxdb.dto.BatchPoints; -import org.influxdb.dto.Point; - /** * A BatchProcessor can be attached to a InfluxDB Instance to collect single point writes and * aggregates them to BatchPoints to get a better write performance. @@ -27,7 +28,7 @@ * @author stefan.majer [at] gmail.com * */ -public class BatchProcessor { +public final class BatchProcessor { private static final Logger LOG = Logger.getLogger(BatchProcessor.class.getName()); protected final BlockingQueue queue; @@ -37,6 +38,7 @@ public class BatchProcessor { final int actions; private final TimeUnit flushIntervalUnit; private final int flushInterval; + private final ConsistencyLevel consistencyLevel; /** * The Builder to create a BatchProcessor instance. @@ -48,6 +50,7 @@ public static final class Builder { private TimeUnit flushIntervalUnit; private int flushInterval; private BiConsumer, Throwable> exceptionHandler = (entries, throwable) -> { }; + private ConsistencyLevel consistencyLevel; /** * @param threadFactory @@ -107,6 +110,18 @@ public Builder exceptionHandler(final BiConsumer, Throwable> han this.exceptionHandler = handler; return this; } + /** + * Consistency level for batch write. 
+ * + * @param consistencyLevel + * the consistencyLevel + * + * @return this Builder to use it fluent + */ + public Builder consistencyLevel(final ConsistencyLevel consistencyLevel) { + this.consistencyLevel = consistencyLevel; + return this; + } /** * Create the BatchProcessor. @@ -120,8 +135,9 @@ public BatchProcessor build() { Objects.requireNonNull(this.flushIntervalUnit, "flushIntervalUnit"); Objects.requireNonNull(this.threadFactory, "threadFactory"); Objects.requireNonNull(this.exceptionHandler, "exceptionHandler"); - return new BatchProcessor(this.influxDB, this.threadFactory, this.actions, this.flushIntervalUnit, - this.flushInterval, exceptionHandler); + return new BatchProcessor(this.influxDB, this.threadFactory, + this.actions, this.flushIntervalUnit, + this.flushInterval, exceptionHandler, this.consistencyLevel); } } @@ -180,9 +196,10 @@ public static Builder builder(final InfluxDB influxDB) { return new Builder(influxDB); } - BatchProcessor(final InfluxDBImpl influxDB, final ThreadFactory threadFactory, final int actions, - final TimeUnit flushIntervalUnit, final int flushInterval, - final BiConsumer, Throwable> exceptionHandler) { + private BatchProcessor(final InfluxDBImpl influxDB, final ThreadFactory threadFactory, final int actions, + final TimeUnit flushIntervalUnit, final int flushInterval, + final BiConsumer, Throwable> exceptionHandler, + final ConsistencyLevel consistencyLevel) { super(); this.influxDB = influxDB; this.actions = actions; @@ -190,6 +207,7 @@ public static Builder builder(final InfluxDB influxDB) { this.flushInterval = flushInterval; this.scheduler = Executors.newSingleThreadScheduledExecutor(threadFactory); this.exceptionHandler = exceptionHandler; + this.consistencyLevel = consistencyLevel; if (actions > 1 && actions < Integer.MAX_VALUE) { this.queue = new LinkedBlockingQueue<>(actions); } else { @@ -229,7 +247,7 @@ void write() { String batchKey = dbName + "_" + rp; if (!batchKeyToBatchPoints.containsKey(batchKey)) { 
BatchPoints batchPoints = BatchPoints.database(dbName) - .retentionPolicy(rp).build(); + .retentionPolicy(rp).consistency(getConsistencyLevel()).build(); batchKeyToBatchPoints.put(batchKey, batchPoints); } batchKeyToBatchPoints.get(batchKey).point(point); @@ -297,4 +315,9 @@ void flushAndShutdown() { void flush() { this.write(); } + + public ConsistencyLevel getConsistencyLevel() { + return consistencyLevel; + } + } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 070a6dbe8..d85600efc 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -3,7 +3,15 @@ import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.Moshi; - +import okhttp3.Headers; +import okhttp3.HttpUrl; +import okhttp3.MediaType; +import okhttp3.OkHttpClient; +import okhttp3.RequestBody; +import okhttp3.ResponseBody; +import okhttp3.logging.HttpLoggingInterceptor; +import okhttp3.logging.HttpLoggingInterceptor.Level; +import okio.BufferedSource; import org.influxdb.InfluxDB; import org.influxdb.InfluxDBException; import org.influxdb.InfluxDBIOException; @@ -14,16 +22,6 @@ import org.influxdb.dto.QueryResult; import org.influxdb.impl.BatchProcessor.HttpBatchEntry; import org.influxdb.impl.BatchProcessor.UdpBatchEntry; - -import okhttp3.Headers; -import okhttp3.HttpUrl; -import okhttp3.MediaType; -import okhttp3.OkHttpClient; -import okhttp3.RequestBody; -import okhttp3.ResponseBody; -import okhttp3.logging.HttpLoggingInterceptor; -import okhttp3.logging.HttpLoggingInterceptor.Level; -import okio.BufferedSource; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; @@ -201,6 +199,16 @@ public InfluxDB enableBatch(final int actions, final int flushDuration, return this; } + @Override + public InfluxDB enableBatch(final int actions, final int flushDuration, final TimeUnit flushDurationTimeUnit, + final ThreadFactory threadFactory, + final 
BiConsumer, Throwable> exceptionHandler, + final ConsistencyLevel consistency) { + enableBatch(actions, flushDuration, flushDurationTimeUnit, threadFactory, exceptionHandler) + .setConsistency(consistency); + return this; + } + @Override public InfluxDB enableBatch(final int actions, final int flushDuration, final TimeUnit flushDurationTimeUnit, final ThreadFactory threadFactory, @@ -214,6 +222,7 @@ public InfluxDB enableBatch(final int actions, final int flushDuration, final Ti .exceptionHandler(exceptionHandler) .interval(flushDuration, flushDurationTimeUnit) .threadFactory(threadFactory) + .consistencyLevel(consistency) .build(); this.batchEnabled.set(true); return this; diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 3f7351ac1..dda093351 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -1,17 +1,5 @@ package org.influxdb; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Set; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; - import org.influxdb.InfluxDB.LogLevel; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; @@ -19,13 +7,26 @@ import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; import org.influxdb.impl.InfluxDBImpl; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Set; 
+import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + /** * Test the InfluxDB API. * @@ -147,7 +148,7 @@ public void testDescribeDatabases() { Assertions.assertTrue(found, "It is expected that describeDataBases contents the newly create database."); this.influxDB.deleteDatabase(dbName); } - + /** * Test that Database exists works. */ @@ -188,7 +189,7 @@ public void testWrite() { Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); this.influxDB.deleteDatabase(dbName); } - + /** * Test the implementation of {@link InfluxDB#write(int, Point)}'s sync support. */ @@ -203,7 +204,7 @@ public void testSyncWritePointThroughUDP() throws InterruptedException { QueryResult result = this.influxDB.query(query); Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); } - + /** * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. */ @@ -223,8 +224,8 @@ public void testAsyncWritePointThroughUDP() throws InterruptedException { this.influxDB.disableBatch(); } } - - + + /** * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. */ @@ -461,7 +462,7 @@ public void testCreateNumericNamedDatabase() { Assertions.assertTrue(result.contains(numericDbName)); this.influxDB.deleteDatabase(numericDbName); } - + /** * Test that creating database which name is empty will throw expected exception */ @@ -501,7 +502,7 @@ public void testIsBatchEnabled() { this.influxDB.disableBatch(); Assertions.assertFalse(this.influxDB.isBatchEnabled()); } - + /** * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. 
*/ @@ -509,7 +510,7 @@ public void testIsBatchEnabled() { public void testBatchEnabledWithThreadFactory() { final String threadName = "async_influxdb_write"; this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, new ThreadFactory() { - + @Override public Thread newThread(Runnable r) { Thread thread = new Thread(r); @@ -524,7 +525,7 @@ public Thread newThread(Runnable r) { existThreadWithSettedName = true; break; } - + } Assertions.assertTrue(existThreadWithSettedName); this.influxDB.disableBatch(); @@ -536,7 +537,7 @@ public void testBatchEnabledWithThreadFactoryIsNull() { this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, null); }); } - + /** * Test the implementation of {@link InfluxDBImpl#InfluxDBImpl(String, String, String, okhttp3.OkHttpClient.Builder)}. */ @@ -778,4 +779,15 @@ public void testCreateDropRetentionPolicies() { Assertions.assertTrue(retentionPolicies.size() == 1); } + /** + * Test the implementation of {@link InfluxDB#isBatchEnabled() with consistency}. + */ + @Test + public void testIsBatchEnabledWithConsistency() { + Assertions.assertFalse(this.influxDB.isBatchEnabled()); + this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS, Executors.defaultThreadFactory(), + (a, b) -> { + }, InfluxDB.ConsistencyLevel.ALL); + Assertions.assertTrue(this.influxDB.isBatchEnabled()); + } } diff --git a/src/test/java/org/influxdb/impl/BatchProcessorTest.java b/src/test/java/org/influxdb/impl/BatchProcessorTest.java index c30c3b388..8a17245f0 100644 --- a/src/test/java/org/influxdb/impl/BatchProcessorTest.java +++ b/src/test/java/org/influxdb/impl/BatchProcessorTest.java @@ -21,6 +21,11 @@ import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; +import static org.junit.Assert.assertNull; +import static org.hamcrest.CoreMatchers.*; +import static org.junit.Assert.assertThat; + + @RunWith(JUnitPlatform.class) public class BatchProcessorTest { @@ -115,8 +120,8 @@ public void testFlushWritesBufferedPointsAndDoesNotShutdownScheduler() throws In public 
void testActionsIsZero() throws InterruptedException, IOException { InfluxDB mockInfluxDB = mock(InfluxDBImpl.class); Assertions.assertThrows(IllegalArgumentException.class, () -> { - BatchProcessor.builder(mockInfluxDB).actions(0) - .interval(1, TimeUnit.NANOSECONDS).build(); + BatchProcessor.builder(mockInfluxDB).actions(0) + .interval(1, TimeUnit.NANOSECONDS).build(); }); } @@ -124,8 +129,8 @@ public void testActionsIsZero() throws InterruptedException, IOException { public void testIntervalIsZero() throws InterruptedException, IOException { InfluxDB mockInfluxDB = mock(InfluxDBImpl.class); Assertions.assertThrows(IllegalArgumentException.class, () -> { - BatchProcessor.builder(mockInfluxDB).actions(1) - .interval(0, TimeUnit.NANOSECONDS).build(); + BatchProcessor.builder(mockInfluxDB).actions(1) + .interval(0, TimeUnit.NANOSECONDS).build(); }); } @@ -133,8 +138,25 @@ public void testIntervalIsZero() throws InterruptedException, IOException { public void testInfluxDBIsNull() throws InterruptedException, IOException { InfluxDB mockInfluxDB = null; Assertions.assertThrows(NullPointerException.class, () -> { - BatchProcessor.builder(mockInfluxDB).actions(1) - .interval(1, TimeUnit.NANOSECONDS).build(); + BatchProcessor.builder(mockInfluxDB).actions(1) + .interval(1, TimeUnit.NANOSECONDS).build(); }); } + + @Test + public void testConsistencyLevelNull() throws InterruptedException, IOException { + InfluxDB mockInfluxDB = mock(InfluxDBImpl.class); + BatchProcessor batchProcessor = BatchProcessor.builder(mockInfluxDB).actions(Integer.MAX_VALUE) + .interval(1, TimeUnit.NANOSECONDS).build(); + assertNull(batchProcessor.getConsistencyLevel()); + } + + @Test + public void testConsistencyLevelUpdated() throws InterruptedException, IOException { + InfluxDB mockInfluxDB = mock(InfluxDBImpl.class); + BatchProcessor batchProcessor = BatchProcessor.builder(mockInfluxDB).actions(Integer.MAX_VALUE) + .interval(1, 
TimeUnit.NANOSECONDS).consistencyLevel(InfluxDB.ConsistencyLevel.ANY).build(); + assertThat(batchProcessor.getConsistencyLevel(), is(equalTo(InfluxDB.ConsistencyLevel.ANY))); + } + } From c7d94556ed3db4d59175fe285ca0662cce4ba5d2 Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 13 Dec 2017 13:40:20 +0100 Subject: [PATCH 099/745] separating InfluxDB initialization into TestUtils --- src/test/java/org/influxdb/InfluxDBTest.java | 20 +--------------- src/test/java/org/influxdb/TestUtils.java | 25 ++++++++++++++++++++ src/test/java/org/influxdb/TicketTest.java | 20 +--------------- 3 files changed, 27 insertions(+), 38 deletions(-) diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index dda093351..c9b1eee21 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -48,26 +48,8 @@ public class InfluxDBTest { */ @BeforeEach public void setUp() throws InterruptedException, IOException { - this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); - boolean influxDBstarted = false; - do { - Pong response; - try { - response = this.influxDB.ping(); - if (response.isGood()) { - influxDBstarted = true; - } - } catch (Exception e) { - // NOOP intentional - e.printStackTrace(); - } - Thread.sleep(100L); - } while (!influxDBstarted); - this.influxDB.setLogLevel(LogLevel.NONE); + this.influxDB = TestUtils.connectToInfluxDB(); this.influxDB.createDatabase(UDP_DATABASE); - System.out.println("################################################################################## "); - System.out.println("# Connected to InfluxDB Version: " + this.influxDB.version() + " #"); - System.out.println("##################################################################################"); } /** diff --git a/src/test/java/org/influxdb/TestUtils.java b/src/test/java/org/influxdb/TestUtils.java index 
7ad8dff24..865ecdcf2 100644 --- a/src/test/java/org/influxdb/TestUtils.java +++ b/src/test/java/org/influxdb/TestUtils.java @@ -1,5 +1,8 @@ package org.influxdb; +import org.influxdb.dto.Pong; + +import java.io.IOException; import java.util.Map; public class TestUtils { @@ -44,4 +47,26 @@ public static String defaultRetentionPolicy(String version) { } } + public static InfluxDB connectToInfluxDB() throws InterruptedException, IOException { + InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); + boolean influxDBstarted = false; + do { + Pong response; + try { + response = influxDB.ping(); + if (response.isGood()) { + influxDBstarted = true; + } + } catch (Exception e) { + // NOOP intentional + e.printStackTrace(); + } + Thread.sleep(100L); + } while (!influxDBstarted); + influxDB.setLogLevel(InfluxDB.LogLevel.NONE); + System.out.println("##################################################################################"); + System.out.println("# Connected to InfluxDB Version: " + influxDB.version() + " #"); + System.out.println("##################################################################################"); + return influxDB; + } } diff --git a/src/test/java/org/influxdb/TicketTest.java b/src/test/java/org/influxdb/TicketTest.java index 29aecf09e..dc373fb6e 100644 --- a/src/test/java/org/influxdb/TicketTest.java +++ b/src/test/java/org/influxdb/TicketTest.java @@ -34,25 +34,7 @@ public class TicketTest { */ @BeforeEach public void setUp() throws InterruptedException, IOException { - this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); - boolean influxDBstarted = false; - do { - Pong response; - try { - response = this.influxDB.ping(); - if (response.isGood()) { - influxDBstarted = true; - } - } catch (Exception e) { - // NOOP intentional - e.printStackTrace(); - } - Thread.sleep(100L); - } 
while (!influxDBstarted); - this.influxDB.setLogLevel(LogLevel.NONE); - System.out.println("##################################################################################"); - System.out.println("# Connected to InfluxDB Version: " + this.influxDB.version() + " #"); - System.out.println("##################################################################################"); + this.influxDB = TestUtils.connectToInfluxDB(); } /** From f61ae41e816a1ae5d1e07f95a317fa01753c5229 Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 13 Dec 2017 16:19:58 +0100 Subject: [PATCH 100/745] fix #396 --- src/main/java/org/influxdb/BatchOptions.java | 60 ++++++++++++++++ src/main/java/org/influxdb/InfluxDB.java | 17 +++++ .../org/influxdb/impl/BatchOptionsImpl.java | 71 +++++++++++++++++++ .../java/org/influxdb/impl/InfluxDBImpl.java | 16 +++++ .../java/org/influxdb/BatchOptionsTest.java | 69 ++++++++++++++++++ 5 files changed, 233 insertions(+) create mode 100644 src/main/java/org/influxdb/BatchOptions.java create mode 100644 src/main/java/org/influxdb/impl/BatchOptionsImpl.java create mode 100644 src/test/java/org/influxdb/BatchOptionsTest.java diff --git a/src/main/java/org/influxdb/BatchOptions.java b/src/main/java/org/influxdb/BatchOptions.java new file mode 100644 index 000000000..cf054229c --- /dev/null +++ b/src/main/java/org/influxdb/BatchOptions.java @@ -0,0 +1,60 @@ +package org.influxdb; + +import org.influxdb.dto.Point; +import org.influxdb.impl.BatchOptionsImpl; + +import java.util.concurrent.ThreadFactory; +import java.util.function.BiConsumer; + +/** + * BatchOptions are used to configure batching of individual data point writes + * into InfluxDB. See {@link InfluxDB#enableBatch(BatchOptions)} + */ +public interface BatchOptions { + + BatchOptions DEFAULTS = BatchOptionsImpl.DEFAULTS; + + /** + * @param actions the number of actions to collect + * @return the BatchOptions instance to be able to use it in a fluent manner. 
+ */ + BatchOptions actions(final int actions); + + /** + * @param flushDuration the time to wait at most (milliseconds). + * @return the BatchOptions instance to be able to use it in a fluent manner. + */ + BatchOptions flushDuration(final int flushDuration); + + /** + * @param threadFactory a ThreadFactory instance to be used + * @return the BatchOptions instance to be able to use it in a fluent manner. + */ + BatchOptions threadFactory(final ThreadFactory threadFactory); + + /** + * @param exceptionHandler a consumer function to handle asynchronous errors + * @return the BatchOptions instance to be able to use it in a fluent manner. + */ + BatchOptions exceptionHandler(final BiConsumer, Throwable> exceptionHandler); + + /** + * @return actions the number of actions to collect + */ + int getActions(); + + /** + * @return flushDuration the time to wait at most (milliseconds). + */ + int getFlushDuration(); + + /** + * @return a ThreadFactory instance to be used + */ + ThreadFactory getThreadFactory(); + + /** + * @return a consumer function to handle asynchronous errors + */ + BiConsumer, Throwable> getExceptionHandler(); +} diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index deb192373..7bfd17d76 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -97,6 +97,23 @@ public String value() { */ public boolean isGzipEnabled(); + /** + * Enable batching of single Point writes to speed up writes significantly. This is the same as calling + * InfluxDB.enableBatch(BatchingOptions.DEFAULTS) + * @return the InfluxDB instance to be able to use it in a fluent manner. + */ + public InfluxDB enableBatch(); + + /** + * Enable batching of single Point writes to speed up writes significantly. If either number of points written or + * flushDuration time limit is reached, a batch write is issued. 
+ * Note that batch processing needs to be explicitly stopped before the application is shutdown. + * To do so call disableBatch(). + * + * @return the InfluxDB instance to be able to use it in a fluent manner. + */ + public InfluxDB enableBatch(final BatchOptions batchOptions); + /** * Enable batching of single Point writes as {@link #enableBatch(int, int, TimeUnit, ThreadFactory)}} * using {@linkplain java.util.concurrent.Executors#defaultThreadFactory() default thread factory}. diff --git a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java new file mode 100644 index 000000000..a1ba79bdc --- /dev/null +++ b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java @@ -0,0 +1,71 @@ +package org.influxdb.impl; + +import org.influxdb.BatchOptions; +import org.influxdb.dto.Point; + +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; +import java.util.function.BiConsumer; + +public class BatchOptionsImpl implements BatchOptions, Cloneable { + + public static BatchOptions DEFAULTS = new BatchOptionsImpl(); + + // default values here are consistent with Telegraf + int actions = 1000; + int flushDuration = 10000; + ThreadFactory threadFactory = Executors.defaultThreadFactory(); + BiConsumer, Throwable> exceptionHandler = (points, throwable) -> { + }; + + private BatchOptionsImpl() { + } + + public BatchOptions actions(final int actions) { + BatchOptionsImpl clone = getClone(); + clone.actions = actions; + return clone; + } + + public BatchOptions flushDuration(final int flushDuration) { + BatchOptionsImpl clone = getClone(); + clone.flushDuration = flushDuration; + return clone; + } + + public BatchOptions threadFactory(final ThreadFactory threadFactory) { + BatchOptionsImpl clone = getClone(); + clone.threadFactory = threadFactory; + return clone; + } + + public BatchOptions exceptionHandler(final BiConsumer, Throwable> exceptionHandler) { + BatchOptionsImpl clone = getClone(); + 
clone.exceptionHandler = exceptionHandler; + return clone; + } + + private BatchOptionsImpl getClone() { + try { + return (BatchOptionsImpl) this.clone(); + } catch (CloneNotSupportedException e) { + throw new RuntimeException(e); + } + } + + public int getActions() { + return actions; + } + + public int getFlushDuration() { + return flushDuration; + } + + public ThreadFactory getThreadFactory() { + return threadFactory; + } + + public BiConsumer, Throwable> getExceptionHandler() { + return exceptionHandler; + } +} diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index d85600efc..de7cf7b2e 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -12,6 +12,7 @@ import okhttp3.logging.HttpLoggingInterceptor; import okhttp3.logging.HttpLoggingInterceptor.Level; import okio.BufferedSource; +import org.influxdb.BatchOptions; import org.influxdb.InfluxDB; import org.influxdb.InfluxDBException; import org.influxdb.InfluxDBIOException; @@ -185,6 +186,21 @@ public boolean isGzipEnabled() { return this.gzipRequestInterceptor.isEnabled(); } + @Override + public InfluxDB enableBatch() { + enableBatch(BatchOptions.DEFAULTS); + return this; + } + + @Override + public InfluxDB enableBatch(BatchOptions batchOptions) { + enableBatch(batchOptions.getActions(), + batchOptions.getFlushDuration(), + TimeUnit.MILLISECONDS,batchOptions.getThreadFactory(), + batchOptions.getExceptionHandler() ); + return this; + } + @Override public InfluxDB enableBatch(final int actions, final int flushDuration, final TimeUnit flushDurationTimeUnit) { diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java new file mode 100644 index 000000000..126f17f84 --- /dev/null +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -0,0 +1,69 @@ +package org.influxdb; + +import org.influxdb.dto.Point; +import 
org.influxdb.dto.Query; +import org.influxdb.dto.QueryResult; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; + +import java.io.IOException; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; + +@RunWith(JUnitPlatform.class) +public class BatchOptionsTest { + + private InfluxDB influxDB; + + @BeforeEach + public void setUp() throws InterruptedException, IOException { + this.influxDB = TestUtils.connectToInfluxDB(); + } + + /** + * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. + */ + @Test + public void testBatchEnabledWithDefaultSettings() { + try { + this.influxDB.enableBatch(); + + } + finally { + this.influxDB.disableBatch(); + } + } + + /** + * Test the implementation of {@link BatchOptions#actions(int)} }. + */ + @Test + public void testActionsSetting() throws InterruptedException { + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BatchOptions options = BatchOptions.DEFAULTS.actions(3); + Assertions.assertEquals(3, options.getActions()); + this.influxDB.enableBatch(options); + this.influxDB.createDatabase(dbName); + this.influxDB.setDatabase(dbName); + for (int j = 0; j < 5; j++) { + Point point = Point.measurement("cpu") + .time(j,TimeUnit.MILLISECONDS) + .addField("idle", (double) j) + .addField("user", 2.0 * j) + .addField("system", 3.0 * j).build(); + this.influxDB.write(point); + } + Thread.sleep(500); + QueryResult result=influxDB.query(new Query("select * from cpu", dbName)); + Assertions.assertEquals(3, result.getResults().get(0).getSeries().get(0).getValues().size()); + } + finally { + this.influxDB.disableBatch(); + this.influxDB.deleteDatabase(dbName); + } + } +} From a0b227b7de62fa19abddc7657e7a9d2609b0c936 Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 13 Dec 2017 16:22:53 +0100 Subject: 
[PATCH 101/745] fix #397 fixing checkstyle fixing checkstyle --- src/main/java/org/influxdb/BatchOptions.java | 15 ++++++ .../org/influxdb/impl/BatchOptionsImpl.java | 24 +++++++-- .../org/influxdb/impl/BatchProcessor.java | 50 +++++++++++++++---- .../java/org/influxdb/impl/InfluxDBImpl.java | 16 ++++-- .../java/org/influxdb/BatchOptionsTest.java | 1 + 5 files changed, 87 insertions(+), 19 deletions(-) diff --git a/src/main/java/org/influxdb/BatchOptions.java b/src/main/java/org/influxdb/BatchOptions.java index cf054229c..df5e39a9e 100644 --- a/src/main/java/org/influxdb/BatchOptions.java +++ b/src/main/java/org/influxdb/BatchOptions.java @@ -26,6 +26,16 @@ public interface BatchOptions { */ BatchOptions flushDuration(final int flushDuration); + /** + * Jitters the batch flush interval by a random amount. This is primarily to avoid + * large write spikes for users running a large number of client instances. + * ie, a jitter of 5s and flush duration 10s means flushes will happen every 10-15s. + * + * @param jitterDuration (milliseconds) + * @return the BatchOptions instance to be able to use it in a fluent manner. + */ + BatchOptions jitterDuration(final int jitterDuration); + /** * @param threadFactory a ThreadFactory instance to be used * @return the BatchOptions instance to be able to use it in a fluent manner. 
@@ -48,6 +58,11 @@ public interface BatchOptions { */ int getFlushDuration(); + /** + * @return batch flush interval jitter value (milliseconds) + */ + int getJitterDuration(); + /** * @return a ThreadFactory instance to be used */ diff --git a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java index a1ba79bdc..380b2befd 100644 --- a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java +++ b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java @@ -7,13 +7,19 @@ import java.util.concurrent.ThreadFactory; import java.util.function.BiConsumer; -public class BatchOptionsImpl implements BatchOptions, Cloneable { +public final class BatchOptionsImpl implements BatchOptions, Cloneable { - public static BatchOptions DEFAULTS = new BatchOptionsImpl(); + public static final BatchOptions DEFAULTS = new BatchOptionsImpl(); // default values here are consistent with Telegraf - int actions = 1000; - int flushDuration = 10000; + public static final int DEFAULT_BATCH_ACTIONS_LIMIT = 1000; + public static final int DEFAULT_BATCH_INTERVAL_DURATION = 1000; + public static final int DEFAULT_JITTER_INTERVAL_DURATION = 0; + + int actions = DEFAULT_BATCH_ACTIONS_LIMIT; + int flushDuration = DEFAULT_BATCH_INTERVAL_DURATION; + int jitterDuration = DEFAULT_JITTER_INTERVAL_DURATION; + ThreadFactory threadFactory = Executors.defaultThreadFactory(); BiConsumer, Throwable> exceptionHandler = (points, throwable) -> { }; @@ -33,6 +39,12 @@ public BatchOptions flushDuration(final int flushDuration) { return clone; } + public BatchOptions jitterDuration(final int jitterDuration) { + BatchOptionsImpl clone = getClone(); + clone.jitterDuration = jitterDuration; + return clone; + } + public BatchOptions threadFactory(final ThreadFactory threadFactory) { BatchOptionsImpl clone = getClone(); clone.threadFactory = threadFactory; @@ -61,6 +73,10 @@ public int getFlushDuration() { return flushDuration; } + public int getJitterDuration() { + 
return jitterDuration; + } + public ThreadFactory getThreadFactory() { return threadFactory; } diff --git a/src/main/java/org/influxdb/impl/BatchProcessor.java b/src/main/java/org/influxdb/impl/BatchProcessor.java index 35457a55c..65536210e 100644 --- a/src/main/java/org/influxdb/impl/BatchProcessor.java +++ b/src/main/java/org/influxdb/impl/BatchProcessor.java @@ -39,6 +39,7 @@ public final class BatchProcessor { private final TimeUnit flushIntervalUnit; private final int flushInterval; private final ConsistencyLevel consistencyLevel; + private final int jitterInterval; /** * The Builder to create a BatchProcessor instance. @@ -49,6 +50,7 @@ public static final class Builder { private int actions; private TimeUnit flushIntervalUnit; private int flushInterval; + private int jitterInterval; private BiConsumer, Throwable> exceptionHandler = (entries, throwable) -> { }; private ConsistencyLevel consistencyLevel; @@ -98,6 +100,25 @@ public Builder interval(final int interval, final TimeUnit unit) { return this; } + /** + * The interval at which at least should issued a write. + * + * @param flushInterval + * the flush interval + * @param jitterInterval + * the flush jitter interval + * @param unit + * the TimeUnit of the interval + * + * @return this Builder to use it fluent + */ + public Builder interval(final int flushInterval, final int jitterInterval, final TimeUnit unit) { + this.flushInterval = flushInterval; + this.jitterInterval = jitterInterval; + this.flushIntervalUnit = unit; + return this; + } + /** * A callback to be used when an error occurs during a batchwrite. 
* @@ -135,9 +156,8 @@ public BatchProcessor build() { Objects.requireNonNull(this.flushIntervalUnit, "flushIntervalUnit"); Objects.requireNonNull(this.threadFactory, "threadFactory"); Objects.requireNonNull(this.exceptionHandler, "exceptionHandler"); - return new BatchProcessor(this.influxDB, this.threadFactory, - this.actions, this.flushIntervalUnit, - this.flushInterval, exceptionHandler, this.consistencyLevel); + return new BatchProcessor(this.influxDB, this.threadFactory, this.actions, this.flushIntervalUnit, + this.flushInterval, this.jitterInterval, exceptionHandler, this.consistencyLevel); } } @@ -196,15 +216,16 @@ public static Builder builder(final InfluxDB influxDB) { return new Builder(influxDB); } - private BatchProcessor(final InfluxDBImpl influxDB, final ThreadFactory threadFactory, final int actions, - final TimeUnit flushIntervalUnit, final int flushInterval, - final BiConsumer, Throwable> exceptionHandler, - final ConsistencyLevel consistencyLevel) { + BatchProcessor(final InfluxDBImpl influxDB, final ThreadFactory threadFactory, final int actions, + final TimeUnit flushIntervalUnit, final int flushInterval, final int jitterInterval, + final BiConsumer, Throwable> exceptionHandler, + final ConsistencyLevel consistencyLevel) { super(); this.influxDB = influxDB; this.actions = actions; this.flushIntervalUnit = flushIntervalUnit; this.flushInterval = flushInterval; + this.jitterInterval = jitterInterval; this.scheduler = Executors.newSingleThreadScheduledExecutor(threadFactory); this.exceptionHandler = exceptionHandler; this.consistencyLevel = consistencyLevel; @@ -213,14 +234,21 @@ private BatchProcessor(final InfluxDBImpl influxDB, final ThreadFactory threadFa } else { this.queue = new LinkedBlockingQueue<>(); } - // Flush at specified Rate - this.scheduler.scheduleAtFixedRate(new Runnable() { + + Runnable flushRunnable = new Runnable() { @Override public void run() { + // write doesn't throw any exceptions write(); + int jitterInterval = (int) 
(Math.random() * BatchProcessor.this.jitterInterval); + BatchProcessor.this.scheduler.schedule(this, + BatchProcessor.this.flushInterval + jitterInterval, BatchProcessor.this.flushIntervalUnit); } - }, this.flushInterval, this.flushInterval, this.flushIntervalUnit); - + }; + // Flush at specified Rate + this.scheduler.schedule(flushRunnable, + this.flushInterval + (int) (Math.random() * BatchProcessor.this.jitterInterval), + this.flushIntervalUnit); } void write() { diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index de7cf7b2e..0c12bcb9b 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -193,11 +193,12 @@ public InfluxDB enableBatch() { } @Override - public InfluxDB enableBatch(BatchOptions batchOptions) { + public InfluxDB enableBatch(final BatchOptions batchOptions) { enableBatch(batchOptions.getActions(), batchOptions.getFlushDuration(), - TimeUnit.MILLISECONDS,batchOptions.getThreadFactory(), - batchOptions.getExceptionHandler() ); + batchOptions.getJitterDuration(), + TimeUnit.MILLISECONDS, batchOptions.getThreadFactory(), + batchOptions.getExceptionHandler()); return this; } @@ -229,6 +230,13 @@ public InfluxDB enableBatch(final int actions, final int flushDuration, final Ti public InfluxDB enableBatch(final int actions, final int flushDuration, final TimeUnit flushDurationTimeUnit, final ThreadFactory threadFactory, final BiConsumer, Throwable> exceptionHandler) { + enableBatch(actions, flushDuration, 0, flushDurationTimeUnit, threadFactory, exceptionHandler); + return this; + } + + private InfluxDB enableBatch(final int actions, final int flushDuration, final int jitterDuration, + final TimeUnit durationTimeUnit, final ThreadFactory threadFactory, + final BiConsumer, Throwable> exceptionHandler) { if (this.batchEnabled.get()) { throw new IllegalStateException("BatchProcessing is already enabled."); } @@ -236,7 +244,7 
@@ public InfluxDB enableBatch(final int actions, final int flushDuration, final Ti .builder(this) .actions(actions) .exceptionHandler(exceptionHandler) - .interval(flushDuration, flushDurationTimeUnit) + .interval(flushDuration, jitterDuration, durationTimeUnit) .threadFactory(threadFactory) .consistencyLevel(consistency) .build(); diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 126f17f84..f58a685af 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -66,4 +66,5 @@ public void testActionsSetting() throws InterruptedException { this.influxDB.deleteDatabase(dbName); } } + } From 8c2f2e6fc75bc93908c87b2722d3dc45b726e6ca Mon Sep 17 00:00:00 2001 From: dubsky Date: Tue, 9 Jan 2018 17:05:13 +0100 Subject: [PATCH 102/745] implementing cluster consistency setting into batch options --- src/main/java/org/influxdb/BatchOptions.java | 17 ++++++++++++++++- .../org/influxdb/impl/BatchOptionsImpl.java | 12 ++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/influxdb/BatchOptions.java b/src/main/java/org/influxdb/BatchOptions.java index df5e39a9e..26e654f24 100644 --- a/src/main/java/org/influxdb/BatchOptions.java +++ b/src/main/java/org/influxdb/BatchOptions.java @@ -49,8 +49,16 @@ public interface BatchOptions { BatchOptions exceptionHandler(final BiConsumer, Throwable> exceptionHandler); /** - * @return actions the number of actions to collect + * @param consistency cluster consistency setting (how many nodes have to store data points + * to treat a write as a success) + * @return the BatchOptions instance to be able to use it in a fluent manner. 
*/ + BatchOptions setConsistency(final InfluxDB.ConsistencyLevel consistency); + + + /** + * @return actions the number of actions to collect + */ int getActions(); /** @@ -72,4 +80,11 @@ public interface BatchOptions { * @return a consumer function to handle asynchronous errors */ BiConsumer, Throwable> getExceptionHandler(); + + /** + * @return cluster consistency setting (how many nodes have to store data points + * to treat a write as a success) + */ + InfluxDB.ConsistencyLevel getConsistency(); + } diff --git a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java index 380b2befd..2fd8bba8d 100644 --- a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java +++ b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java @@ -1,6 +1,7 @@ package org.influxdb.impl; import org.influxdb.BatchOptions; +import org.influxdb.InfluxDB; import org.influxdb.dto.Point; import java.util.concurrent.Executors; @@ -23,6 +24,7 @@ public final class BatchOptionsImpl implements BatchOptions, Cloneable { ThreadFactory threadFactory = Executors.defaultThreadFactory(); BiConsumer, Throwable> exceptionHandler = (points, throwable) -> { }; + InfluxDB.ConsistencyLevel consistency = InfluxDB.ConsistencyLevel.ONE; private BatchOptionsImpl() { } @@ -57,6 +59,12 @@ public BatchOptions exceptionHandler(final BiConsumer, Throwable return clone; } + public BatchOptions setConsistency(final InfluxDB.ConsistencyLevel consistency) { + BatchOptionsImpl clone = getClone(); + clone.consistency = consistency; + return clone; + } + private BatchOptionsImpl getClone() { try { return (BatchOptionsImpl) this.clone(); @@ -77,6 +85,10 @@ public int getJitterDuration() { return jitterDuration; } + public InfluxDB.ConsistencyLevel getConsistency() { + return consistency; + } + public ThreadFactory getThreadFactory() { return threadFactory; } From e3f3db52a74801fcd30430a0886e348a663da604 Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 13 Dec 2017 13:40:20 
+0100 Subject: [PATCH 103/745] separating InfluxDB initialization into TestUtils --- src/test/java/org/influxdb/TestUtils.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/influxdb/TestUtils.java b/src/test/java/org/influxdb/TestUtils.java index 865ecdcf2..e85f6feaf 100644 --- a/src/test/java/org/influxdb/TestUtils.java +++ b/src/test/java/org/influxdb/TestUtils.java @@ -65,7 +65,7 @@ public static InfluxDB connectToInfluxDB() throws InterruptedException, IOExcept } while (!influxDBstarted); influxDB.setLogLevel(InfluxDB.LogLevel.NONE); System.out.println("##################################################################################"); - System.out.println("# Connected to InfluxDB Version: " + this.influxDB.version() + " #"); + System.out.println("# Connected to InfluxDB Version: " + influxDB.version() + " #"); System.out.println("##################################################################################"); return influxDB; } From 92a58877cb186265af740458e7eefe4abe231104 Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 13 Dec 2017 16:19:58 +0100 Subject: [PATCH 104/745] fix #396 --- src/main/java/org/influxdb/BatchOptions.java | 6 +++--- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 1 + src/test/java/org/influxdb/BatchOptionsTest.java | 1 - 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/influxdb/BatchOptions.java b/src/main/java/org/influxdb/BatchOptions.java index 26e654f24..8732a245a 100644 --- a/src/main/java/org/influxdb/BatchOptions.java +++ b/src/main/java/org/influxdb/BatchOptions.java @@ -56,9 +56,9 @@ public interface BatchOptions { BatchOptions setConsistency(final InfluxDB.ConsistencyLevel consistency); - /** - * @return actions the number of actions to collect - */ + /** + * @return actions the number of actions to collect + */ int getActions(); /** diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java 
index 0c12bcb9b..8fe0d4255 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -12,6 +12,7 @@ import okhttp3.logging.HttpLoggingInterceptor; import okhttp3.logging.HttpLoggingInterceptor.Level; import okio.BufferedSource; + import org.influxdb.BatchOptions; import org.influxdb.InfluxDB; import org.influxdb.InfluxDBException; diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index f58a685af..126f17f84 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -66,5 +66,4 @@ public void testActionsSetting() throws InterruptedException { this.influxDB.deleteDatabase(dbName); } } - } From d5155e0f2b8749d0bd7a1591c33c9023f8c75956 Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 13 Dec 2017 16:22:53 +0100 Subject: [PATCH 105/745] fix #397 fixing checkstyle fixing checkstyle --- src/test/java/org/influxdb/BatchOptionsTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 126f17f84..f58a685af 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -66,4 +66,5 @@ public void testActionsSetting() throws InterruptedException { this.influxDB.deleteDatabase(dbName); } } + } From 7bdd5e5b459bcaa52aa1218dd7bc3b3e06ef988e Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 3 Jan 2018 20:37:39 +0100 Subject: [PATCH 106/745] Adding analysis of error returned by InfluxDB for further use --- pom.xml | 5 ++ .../java/org/influxdb/InfluxDBException.java | 80 +++++++++++++++++++ .../java/org/influxdb/impl/InfluxDBImpl.java | 10 +++ 3 files changed, 95 insertions(+) diff --git a/pom.xml b/pom.xml index bd92d18d7..6510af401 100644 --- a/pom.xml +++ b/pom.xml @@ -261,5 +261,10 @@ logging-interceptor 3.9.1 + + org.json + json + 
20171018 + diff --git a/src/main/java/org/influxdb/InfluxDBException.java b/src/main/java/org/influxdb/InfluxDBException.java index 69f28e0b3..dd7939d0d 100644 --- a/src/main/java/org/influxdb/InfluxDBException.java +++ b/src/main/java/org/influxdb/InfluxDBException.java @@ -18,4 +18,84 @@ public InfluxDBException(final String message, final Throwable cause) { public InfluxDBException(final Throwable cause) { super(cause); } + + /** + * @return true if the operation may succeed if repeated, false otherwise. + */ + public boolean isRetryWorth() { + return true; + } + + static final String FIELD_TYPE_CONFLICT_ERROR="field type conflict"; + static final String POINTS_BEYOND_RETENTION_POLICY_ERROR="points beyond retention policy"; + static final String UNABLE_TO_PARSE_ERROR="unable to parse"; + static final String HINTED_HAND_OFF_QUEUE_NOT_EMPTY_ERROR="hinted handoff queue not empty"; + static final String DATABASE_NOT_FOUND_ERROR="database not found"; + static final String CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR="cache-max-memory-size exceeded"; + + public static class DatabaseNotFoundError extends InfluxDBException { + private DatabaseNotFoundError(String message) { + super(message); + } + + public boolean isRetryWorth() { + return false; + } + } + + public static class HintedHandOffQueueNotEmptyException extends InfluxDBException { + private HintedHandOffQueueNotEmptyException(String message) { + super(message); + } + public boolean isRetryWorth() { + return false; + } + } + + public static class UnableToParseException extends InfluxDBException { + private UnableToParseException(String message) { + super(message); + } + public boolean isRetryWorth() { + return false; + } + } + + public static class FieldTypeConflictException extends InfluxDBException { + private FieldTypeConflictException(String message) { + super(message); + } + public boolean isRetryWorth() { + return false; + } + } + + public static class PointsBeyondRetentionPolicyException extends 
InfluxDBException { + private PointsBeyondRetentionPolicyException(String message) { + super(message); + } + public boolean isRetryWorth() { + return false; + } + } + + public static class CacheMaxMemorySizeExceededException extends InfluxDBException { + private CacheMaxMemorySizeExceededException(String message) { + super(message); + } + public boolean isRetryWorth() { + return true; + } + } + + public static InfluxDBException buildExceptionForErrorState(String error) { + if(error.contains(DATABASE_NOT_FOUND_ERROR)) return new DatabaseNotFoundError(error); + if(error.contains(POINTS_BEYOND_RETENTION_POLICY_ERROR)) return new PointsBeyondRetentionPolicyException(error); + if(error.contains(FIELD_TYPE_CONFLICT_ERROR)) return new FieldTypeConflictException(error); + if(error.contains(UNABLE_TO_PARSE_ERROR)) return new UnableToParseException(error); + if(error.contains(HINTED_HAND_OFF_QUEUE_NOT_EMPTY_ERROR)) return new HintedHandOffQueueNotEmptyException(error); + if(error.contains(CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)) return new CacheMaxMemorySizeExceededException(error); + throw new InfluxDBException(error); + } + } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 8fe0d4255..843ffb01e 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -24,6 +24,8 @@ import org.influxdb.dto.QueryResult; import org.influxdb.impl.BatchProcessor.HttpBatchEntry; import org.influxdb.impl.BatchProcessor.UdpBatchEntry; +import org.json.JSONException; +import org.json.JSONObject; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; @@ -569,6 +571,14 @@ private T execute(final Call call) { return response.body(); } try (ResponseBody errorBody = response.errorBody()) { + try { + JSONObject body=new JSONObject(errorBody.string()); + Object error=body.getString("error"); + if(error!=null && error instanceof String) { + throw 
InfluxDBException.buildExceptionForErrorState((String) error); + } + } + catch(JSONException e) {} throw new InfluxDBException(errorBody.string()); } } catch (IOException e) { From 3fe6a1a8acc28c5b4187b057fd2d9f408d80f371 Mon Sep 17 00:00:00 2001 From: dubsky Date: Mon, 8 Jan 2018 18:06:04 +0100 Subject: [PATCH 107/745] implementing retry buffer for failed writes due to occupancy of the influxdb server --- checkstyle.xml | 5 +- src/main/java/org/influxdb/BatchOptions.java | 17 +++ .../java/org/influxdb/InfluxDBException.java | 79 ++++++++---- .../java/org/influxdb/dto/BatchPoints.java | 27 ++++ .../org/influxdb/impl/BatchOptionsImpl.java | 19 ++- .../org/influxdb/impl/BatchProcessor.java | 35 +++++- .../java/org/influxdb/impl/BatchWriter.java | 15 +++ .../java/org/influxdb/impl/InfluxDBImpl.java | 26 ++-- .../org/influxdb/impl/OneShotBatchWriter.java | 21 ++++ .../impl/RetryCapableBatchWriter.java | 119 ++++++++++++++++++ 10 files changed, 320 insertions(+), 43 deletions(-) create mode 100644 src/main/java/org/influxdb/impl/BatchWriter.java create mode 100644 src/main/java/org/influxdb/impl/OneShotBatchWriter.java create mode 100644 src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java diff --git a/checkstyle.xml b/checkstyle.xml index d27f8b51d..d739eb82f 100644 --- a/checkstyle.xml +++ b/checkstyle.xml @@ -28,8 +28,9 @@ - - + diff --git a/src/main/java/org/influxdb/BatchOptions.java b/src/main/java/org/influxdb/BatchOptions.java index 8732a245a..1d7e9c727 100644 --- a/src/main/java/org/influxdb/BatchOptions.java +++ b/src/main/java/org/influxdb/BatchOptions.java @@ -36,6 +36,18 @@ public interface BatchOptions { */ BatchOptions jitterDuration(final int jitterDuration); + /** + * The client maintains a buffer for failed writes so that the writes will be retried later on. This may + * help to overcome temporary network problems or InfluxDB load spikes. + * When the buffer is full and new points are written, oldest entries in the buffer are lost. 
+ * + * To disable this feature set buffer limit to a value smaller than {@link BatchOptions#getActions} + * + * @param bufferLimit maximum number of points stored in the retry buffer + * @return the BatchOptions instance to be able to use it in a fluent manner. + */ + BatchOptions bufferLimit(final int bufferLimit); + /** * @param threadFactory a ThreadFactory instance to be used * @return the BatchOptions instance to be able to use it in a fluent manner. @@ -71,6 +83,11 @@ public interface BatchOptions { */ int getJitterDuration(); + /** + * @return Maximum number of points stored in the retry buffer, see {@link BatchOptions#bufferLimit(int)} + */ + int getBufferLimit(); + /** * @return a ThreadFactory instance to be used */ diff --git a/src/main/java/org/influxdb/InfluxDBException.java b/src/main/java/org/influxdb/InfluxDBException.java index dd7939d0d..5444bb354 100644 --- a/src/main/java/org/influxdb/InfluxDBException.java +++ b/src/main/java/org/influxdb/InfluxDBException.java @@ -26,15 +26,15 @@ public boolean isRetryWorth() { return true; } - static final String FIELD_TYPE_CONFLICT_ERROR="field type conflict"; - static final String POINTS_BEYOND_RETENTION_POLICY_ERROR="points beyond retention policy"; - static final String UNABLE_TO_PARSE_ERROR="unable to parse"; - static final String HINTED_HAND_OFF_QUEUE_NOT_EMPTY_ERROR="hinted handoff queue not empty"; - static final String DATABASE_NOT_FOUND_ERROR="database not found"; - static final String CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR="cache-max-memory-size exceeded"; - - public static class DatabaseNotFoundError extends InfluxDBException { - private DatabaseNotFoundError(String message) { + static final String FIELD_TYPE_CONFLICT_ERROR = "field type conflict"; + static final String POINTS_BEYOND_RETENTION_POLICY_ERROR = "points beyond retention policy"; + static final String UNABLE_TO_PARSE_ERROR = "unable to parse"; + static final String HINTED_HAND_OFF_QUEUE_NOT_EMPTY_ERROR = "hinted handoff queue not 
empty"; + static final String DATABASE_NOT_FOUND_ERROR = "database not found"; + static final String CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR = "cache-max-memory-size exceeded"; + + public static final class DatabaseNotFoundError extends InfluxDBException { + private DatabaseNotFoundError(final String message) { super(message); } @@ -43,58 +43,85 @@ public boolean isRetryWorth() { } } - public static class HintedHandOffQueueNotEmptyException extends InfluxDBException { - private HintedHandOffQueueNotEmptyException(String message) { + public static final class HintedHandOffQueueNotEmptyException extends InfluxDBException { + private HintedHandOffQueueNotEmptyException(final String message) { super(message); } + public boolean isRetryWorth() { return false; } } - public static class UnableToParseException extends InfluxDBException { - private UnableToParseException(String message) { + public static final class UnableToParseException extends InfluxDBException { + private UnableToParseException(final String message) { super(message); } + public boolean isRetryWorth() { return false; } } - public static class FieldTypeConflictException extends InfluxDBException { - private FieldTypeConflictException(String message) { + public static final class FieldTypeConflictException extends InfluxDBException { + private FieldTypeConflictException(final String message) { super(message); } + public boolean isRetryWorth() { return false; } } - public static class PointsBeyondRetentionPolicyException extends InfluxDBException { - private PointsBeyondRetentionPolicyException(String message) { + public static final class PointsBeyondRetentionPolicyException extends InfluxDBException { + private PointsBeyondRetentionPolicyException(final String message) { super(message); } + public boolean isRetryWorth() { return false; } } - public static class CacheMaxMemorySizeExceededException extends InfluxDBException { - private CacheMaxMemorySizeExceededException(String message) { + public static final 
class CacheMaxMemorySizeExceededException extends InfluxDBException { + private CacheMaxMemorySizeExceededException(final String message) { super(message); } + public boolean isRetryWorth() { return true; } } - public static InfluxDBException buildExceptionForErrorState(String error) { - if(error.contains(DATABASE_NOT_FOUND_ERROR)) return new DatabaseNotFoundError(error); - if(error.contains(POINTS_BEYOND_RETENTION_POLICY_ERROR)) return new PointsBeyondRetentionPolicyException(error); - if(error.contains(FIELD_TYPE_CONFLICT_ERROR)) return new FieldTypeConflictException(error); - if(error.contains(UNABLE_TO_PARSE_ERROR)) return new UnableToParseException(error); - if(error.contains(HINTED_HAND_OFF_QUEUE_NOT_EMPTY_ERROR)) return new HintedHandOffQueueNotEmptyException(error); - if(error.contains(CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)) return new CacheMaxMemorySizeExceededException(error); + public static final class RetryBufferOverrunException extends InfluxDBException { + public RetryBufferOverrunException(final String message) { + super(message); + } + + public boolean isRetryWorth() { + return false; + } + } + + public static InfluxDBException buildExceptionForErrorState(final String error) { + if (error.contains(DATABASE_NOT_FOUND_ERROR)) { + return new DatabaseNotFoundError(error); + } + if (error.contains(POINTS_BEYOND_RETENTION_POLICY_ERROR)) { + return new PointsBeyondRetentionPolicyException(error); + } + if (error.contains(FIELD_TYPE_CONFLICT_ERROR)) { + return new FieldTypeConflictException(error); + } + if (error.contains(UNABLE_TO_PARSE_ERROR)) { + return new UnableToParseException(error); + } + if (error.contains(HINTED_HAND_OFF_QUEUE_NOT_EMPTY_ERROR)) { + return new HintedHandOffQueueNotEmptyException(error); + } + if (error.contains(CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)) { + return new CacheMaxMemorySizeExceededException(error); + } throw new InfluxDBException(error); } diff --git a/src/main/java/org/influxdb/dto/BatchPoints.java 
b/src/main/java/org/influxdb/dto/BatchPoints.java index c67ddcf28..9d29d6cd3 100644 --- a/src/main/java/org/influxdb/dto/BatchPoints.java +++ b/src/main/java/org/influxdb/dto/BatchPoints.java @@ -280,4 +280,31 @@ public String lineProtocol() { } return sb.toString(); } + + /** + * Test whether is possible to merge two BatchPoints objects. + * + * @param that batch point to merge in + * @return true if the batch points can be sent in a single HTTP request write + */ + public boolean isMergeAbleWith(final BatchPoints that) { + return Objects.equals(database, that.database) + && Objects.equals(retentionPolicy, that.retentionPolicy) + && Objects.equals(tags, that.tags) + && consistency == that.consistency; + } + + /** + * Merge two BatchPoints objects. + * + * @param that batch point to merge in + * @return true if the batch points have been merged into this BatchPoints instance. Return false otherwise. + */ + public boolean mergeIn(final BatchPoints that) { + boolean mergeAble = isMergeAbleWith(that); + if (mergeAble) { + this.points.addAll(that.points); + } + return mergeAble; + } } diff --git a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java index 2fd8bba8d..5edbc995f 100644 --- a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java +++ b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java @@ -16,10 +16,12 @@ public final class BatchOptionsImpl implements BatchOptions, Cloneable { public static final int DEFAULT_BATCH_ACTIONS_LIMIT = 1000; public static final int DEFAULT_BATCH_INTERVAL_DURATION = 1000; public static final int DEFAULT_JITTER_INTERVAL_DURATION = 0; + public static final int DEFAULT_BUFFER_LIMIT = 10000; - int actions = DEFAULT_BATCH_ACTIONS_LIMIT; - int flushDuration = DEFAULT_BATCH_INTERVAL_DURATION; - int jitterDuration = DEFAULT_JITTER_INTERVAL_DURATION; + private int actions = DEFAULT_BATCH_ACTIONS_LIMIT; + private int flushDuration = DEFAULT_BATCH_INTERVAL_DURATION; + private int 
jitterDuration = DEFAULT_JITTER_INTERVAL_DURATION; + private int bufferLimit = DEFAULT_BUFFER_LIMIT; ThreadFactory threadFactory = Executors.defaultThreadFactory(); BiConsumer, Throwable> exceptionHandler = (points, throwable) -> { @@ -47,6 +49,12 @@ public BatchOptions jitterDuration(final int jitterDuration) { return clone; } + public BatchOptions bufferLimit(final int bufferLimit) { + BatchOptionsImpl clone = getClone(); + clone.bufferLimit = bufferLimit; + return clone; + } + public BatchOptions threadFactory(final ThreadFactory threadFactory) { BatchOptionsImpl clone = getClone(); clone.threadFactory = threadFactory; @@ -96,4 +104,9 @@ public ThreadFactory getThreadFactory() { public BiConsumer, Throwable> getExceptionHandler() { return exceptionHandler; } + + @Override + public int getBufferLimit() { + return bufferLimit; + } } diff --git a/src/main/java/org/influxdb/impl/BatchProcessor.java b/src/main/java/org/influxdb/impl/BatchProcessor.java index 65536210e..f2f3ad013 100644 --- a/src/main/java/org/influxdb/impl/BatchProcessor.java +++ b/src/main/java/org/influxdb/impl/BatchProcessor.java @@ -21,6 +21,11 @@ import java.util.logging.Level; import java.util.logging.Logger; +import org.influxdb.InfluxDB; +import org.influxdb.dto.BatchPoints; +import org.influxdb.dto.Point; + + /** * A BatchProcessor can be attached to a InfluxDB Instance to collect single point writes and * aggregates them to BatchPoints to get a better write performance. @@ -40,6 +45,7 @@ public final class BatchProcessor { private final int flushInterval; private final ConsistencyLevel consistencyLevel; private final int jitterInterval; + private final BatchWriter batchWriter; /** * The Builder to create a BatchProcessor instance. 
@@ -51,6 +57,10 @@ public static final class Builder { private TimeUnit flushIntervalUnit; private int flushInterval; private int jitterInterval; + // this is a default value if the InfluxDb.enableBatch(BatchOptions) IS NOT used + // the reason is backward compatibility + private int bufferLimit = 0; + private BiConsumer, Throwable> exceptionHandler = (entries, throwable) -> { }; private ConsistencyLevel consistencyLevel; @@ -119,6 +129,18 @@ public Builder interval(final int flushInterval, final int jitterInterval, final return this; } + /** + * A buffer for failed writes so that the writes will be retried later on. When the buffer is full and + * new points are written, oldest entries in the buffer are lost. + * + * @param bufferLimit maximum number of points stored in the buffer + * @return this Builder to use it fluent + */ + public Builder bufferLimit(final int bufferLimit) { + this.bufferLimit = bufferLimit; + return this; + } + /** * A callback to be used when an error occurs during a batchwrite. 
* @@ -156,7 +178,13 @@ public BatchProcessor build() { Objects.requireNonNull(this.flushIntervalUnit, "flushIntervalUnit"); Objects.requireNonNull(this.threadFactory, "threadFactory"); Objects.requireNonNull(this.exceptionHandler, "exceptionHandler"); - return new BatchProcessor(this.influxDB, this.threadFactory, this.actions, this.flushIntervalUnit, + BatchWriter batchWriter; + if (this.bufferLimit > this.actions) { + batchWriter = new RetryCapableBatchWriter(this.influxDB, this.exceptionHandler, this.bufferLimit, this.actions); + } else { + batchWriter = new OneShotBatchWriter(this.influxDB); + } + return new BatchProcessor(this.influxDB, batchWriter, this.threadFactory, this.actions, this.flushIntervalUnit, this.flushInterval, this.jitterInterval, exceptionHandler, this.consistencyLevel); } } @@ -216,12 +244,13 @@ public static Builder builder(final InfluxDB influxDB) { return new Builder(influxDB); } - BatchProcessor(final InfluxDBImpl influxDB, final ThreadFactory threadFactory, final int actions, + BatchProcessor(final InfluxDBImpl influxDB, final BatchWriter batchWriter, final ThreadFactory threadFactory, final int actions, final TimeUnit flushIntervalUnit, final int flushInterval, final int jitterInterval, final BiConsumer, Throwable> exceptionHandler, final ConsistencyLevel consistencyLevel) { super(); this.influxDB = influxDB; + this.batchWriter = batchWriter; this.actions = actions; this.flushIntervalUnit = flushIntervalUnit; this.flushInterval = flushInterval; @@ -291,7 +320,7 @@ void write() { } for (BatchPoints batchPoints : batchKeyToBatchPoints.values()) { - BatchProcessor.this.influxDB.write(batchPoints); + BatchProcessor.this.batchWriter.write(batchPoints); } for (Entry> entry : udpPortToBatchPoints.entrySet()) { for (String lineprotocolStr : entry.getValue()) { diff --git a/src/main/java/org/influxdb/impl/BatchWriter.java b/src/main/java/org/influxdb/impl/BatchWriter.java new file mode 100644 index 000000000..e2a169143 --- /dev/null +++ 
b/src/main/java/org/influxdb/impl/BatchWriter.java @@ -0,0 +1,15 @@ +package org.influxdb.impl; + +import org.influxdb.dto.BatchPoints; + +/** + * Write individual batches to InfluxDB. + */ +interface BatchWriter { + /** + * Write the given batch into InfluxDB. + * @param batchPoints to write + */ + void write(BatchPoints batchPoints); +} + diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 843ffb01e..720f443d5 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -197,11 +197,19 @@ public InfluxDB enableBatch() { @Override public InfluxDB enableBatch(final BatchOptions batchOptions) { - enableBatch(batchOptions.getActions(), - batchOptions.getFlushDuration(), - batchOptions.getJitterDuration(), - TimeUnit.MILLISECONDS, batchOptions.getThreadFactory(), - batchOptions.getExceptionHandler()); + + if (this.batchEnabled.get()) { + throw new IllegalStateException("BatchProcessing is already enabled."); + } + this.batchProcessor = BatchProcessor + .builder(this) + .actions(batchOptions.getActions()) + .exceptionHandler(batchOptions.getExceptionHandler()) + .interval(batchOptions.getFlushDuration(), batchOptions.getJitterDuration(), TimeUnit.MILLISECONDS) + .threadFactory(batchOptions.getThreadFactory()) + .bufferLimit(batchOptions.getBufferLimit()) + .build(); + this.batchEnabled.set(true); return this; } @@ -572,13 +580,13 @@ private T execute(final Call call) { } try (ResponseBody errorBody = response.errorBody()) { try { - JSONObject body=new JSONObject(errorBody.string()); - Object error=body.getString("error"); - if(error!=null && error instanceof String) { + JSONObject body = new JSONObject(errorBody.string()); + Object error = body.getString("error"); + if (error != null && error instanceof String) { throw InfluxDBException.buildExceptionForErrorState((String) error); } + } catch (JSONException e) { } - catch(JSONException e) {} 
throw new InfluxDBException(errorBody.string()); } } catch (IOException e) { diff --git a/src/main/java/org/influxdb/impl/OneShotBatchWriter.java b/src/main/java/org/influxdb/impl/OneShotBatchWriter.java new file mode 100644 index 000000000..c6594f6f5 --- /dev/null +++ b/src/main/java/org/influxdb/impl/OneShotBatchWriter.java @@ -0,0 +1,21 @@ +package org.influxdb.impl; + +import org.influxdb.InfluxDB; +import org.influxdb.dto.BatchPoints; + +/** + * Batch writer that tries to write BatchPoints exactly once. + */ +class OneShotBatchWriter implements BatchWriter { + + private InfluxDB influxDB; + + OneShotBatchWriter(final InfluxDB influxDB) { + this.influxDB = influxDB; + } + + @Override + public void write(final BatchPoints batchPoints) { + influxDB.write(batchPoints); + } +} diff --git a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java new file mode 100644 index 000000000..bf3723374 --- /dev/null +++ b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java @@ -0,0 +1,119 @@ +package org.influxdb.impl; + +import org.influxdb.InfluxDB; +import org.influxdb.InfluxDBException; +import org.influxdb.dto.BatchPoints; +import org.influxdb.dto.Point; + +import java.util.LinkedList; +import java.util.List; +import java.util.ListIterator; +import java.util.function.BiConsumer; + +/** + * Batch writer that tries to retry a write if it failed previously and + * the reason of the failure is not permanent. 
+ */ +class RetryCapableBatchWriter implements BatchWriter { + + private InfluxDB influxDB; + private BiConsumer, Throwable> exceptionHandler; + private LinkedList batchQueue; + private int requestActionsLimit; + private int retryBufferCapacity; + private int usedRetryBufferCapacity; + + RetryCapableBatchWriter(final InfluxDB influxDB, final BiConsumer, Throwable> exceptionHandler, + final int retryBufferCapacity, final int requestActionsLimit) { + this.influxDB = influxDB; + this.exceptionHandler = exceptionHandler; + batchQueue = new LinkedList<>(); + this.retryBufferCapacity = retryBufferCapacity; + this.requestActionsLimit = requestActionsLimit; + } + + private enum WriteResultOutcome { WRITTEN, FAILED_RETRY_POSSIBLE, FAILED_RETRY_IMPOSSIBLE } + + private static class WriteResult { + + static final WriteResult WRITTEN = new WriteResult(WriteResultOutcome.WRITTEN); + + WriteResultOutcome outcome; + Throwable throwable; + + public WriteResult(final WriteResultOutcome outcome) { + this.outcome = outcome; + } + + public WriteResult(final WriteResultOutcome outcome, final Throwable throwable) { + this.outcome = outcome; + this.throwable = throwable; + } + + public WriteResult(final InfluxDBException e) { + this.throwable = e; + if (e.isRetryWorth()) { + this.outcome = WriteResultOutcome.FAILED_RETRY_POSSIBLE; + } else { + this.outcome = WriteResultOutcome.FAILED_RETRY_IMPOSSIBLE; + } + } + } + + @Override + public void write(final BatchPoints batchPoints) { + // empty the cached data first + ListIterator iterator = batchQueue.listIterator(); + while (iterator.hasNext()) { + BatchPoints entry = iterator.next(); + WriteResult result = tryToWrite(entry); + if (result.outcome == WriteResultOutcome.WRITTEN + || result.outcome == WriteResultOutcome.FAILED_RETRY_IMPOSSIBLE) { + iterator.remove(); + usedRetryBufferCapacity -= entry.getPoints().size(); + exceptionHandler.accept(entry.getPoints(), result.throwable); + } + } + // write the last given batch last so that 
duplicate data points get overwritten correctly + WriteResult result = tryToWrite(batchPoints); + if (result.outcome == WriteResultOutcome.FAILED_RETRY_POSSIBLE) { + addToBatchQueue(batchPoints); + } + } + + private WriteResult tryToWrite(final BatchPoints batchPoints) { + try { + influxDB.write(batchPoints); + return WriteResult.WRITTEN; + } catch (InfluxDBException e) { + return new WriteResult(e); + } catch (Exception e) { + return new WriteResult(WriteResultOutcome.FAILED_RETRY_POSSIBLE, e); + } + } + + private void evictTooOldFailedWrites() { + while (usedRetryBufferCapacity > retryBufferCapacity && batchQueue.size() > 0) { + List points = batchQueue.removeFirst().getPoints(); + usedRetryBufferCapacity -= points.size(); + exceptionHandler.accept(points, + new InfluxDBException.RetryBufferOverrunException( + "Retry buffer overrun, current capacity: " + retryBufferCapacity)); + } + } + + private void addToBatchQueue(final BatchPoints batchPoints) { + if (batchQueue.size() > 0) { + BatchPoints last = batchQueue.getLast(); + if (last.getPoints().size() + batchPoints.getPoints().size() <= requestActionsLimit) { + boolean hasBeenMergedIn = last.mergeIn(batchPoints); + if (hasBeenMergedIn) { + return; + } + } + } + batchQueue.add(batchPoints); + usedRetryBufferCapacity += batchPoints.getPoints().size(); + evictTooOldFailedWrites(); + } +} From ce90c44e04d9b7706e19d83c68c85d6a41c15ab9 Mon Sep 17 00:00:00 2001 From: dubsky Date: Tue, 9 Jan 2018 17:31:21 +0100 Subject: [PATCH 108/745] implementing cluster consistency setting into batch options --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 720f443d5..701f2f5c2 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -208,6 +208,7 @@ public InfluxDB enableBatch(final BatchOptions batchOptions) { 
.interval(batchOptions.getFlushDuration(), batchOptions.getJitterDuration(), TimeUnit.MILLISECONDS) .threadFactory(batchOptions.getThreadFactory()) .bufferLimit(batchOptions.getBufferLimit()) + .consistencyLevel(batchOptions.getConsistency()) .build(); this.batchEnabled.set(true); return this; From 3218ac0a88c9295d6fc72b24c474777531903720 Mon Sep 17 00:00:00 2001 From: dubsky Date: Tue, 9 Jan 2018 17:35:44 +0100 Subject: [PATCH 109/745] fixing checkstyle after rebase --- checkstyle.xml | 4 +--- src/main/java/org/influxdb/impl/BatchProcessor.java | 9 ++------- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/checkstyle.xml b/checkstyle.xml index d739eb82f..fe2de79f3 100644 --- a/checkstyle.xml +++ b/checkstyle.xml @@ -28,9 +28,7 @@ - + diff --git a/src/main/java/org/influxdb/impl/BatchProcessor.java b/src/main/java/org/influxdb/impl/BatchProcessor.java index f2f3ad013..e1d6a5073 100644 --- a/src/main/java/org/influxdb/impl/BatchProcessor.java +++ b/src/main/java/org/influxdb/impl/BatchProcessor.java @@ -21,11 +21,6 @@ import java.util.logging.Level; import java.util.logging.Logger; -import org.influxdb.InfluxDB; -import org.influxdb.dto.BatchPoints; -import org.influxdb.dto.Point; - - /** * A BatchProcessor can be attached to a InfluxDB Instance to collect single point writes and * aggregates them to BatchPoints to get a better write performance. 
@@ -244,8 +239,8 @@ public static Builder builder(final InfluxDB influxDB) { return new Builder(influxDB); } - BatchProcessor(final InfluxDBImpl influxDB, final BatchWriter batchWriter, final ThreadFactory threadFactory, final int actions, - final TimeUnit flushIntervalUnit, final int flushInterval, final int jitterInterval, + BatchProcessor(final InfluxDBImpl influxDB, final BatchWriter batchWriter, final ThreadFactory threadFactory, + final int actions, final TimeUnit flushIntervalUnit, final int flushInterval, final int jitterInterval, final BiConsumer, Throwable> exceptionHandler, final ConsistencyLevel consistencyLevel) { super(); From d0411b7220c7ce59efb3615d451308d3f4c8171c Mon Sep 17 00:00:00 2001 From: dubsky Date: Tue, 9 Jan 2018 17:41:10 +0100 Subject: [PATCH 110/745] fixing after a wrong merge after rebase --- src/test/java/org/influxdb/TestUtils.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/influxdb/TestUtils.java b/src/test/java/org/influxdb/TestUtils.java index 865ecdcf2..e85f6feaf 100644 --- a/src/test/java/org/influxdb/TestUtils.java +++ b/src/test/java/org/influxdb/TestUtils.java @@ -65,7 +65,7 @@ public static InfluxDB connectToInfluxDB() throws InterruptedException, IOExcept } while (!influxDBstarted); influxDB.setLogLevel(InfluxDB.LogLevel.NONE); System.out.println("##################################################################################"); - System.out.println("# Connected to InfluxDB Version: " + this.influxDB.version() + " #"); + System.out.println("# Connected to InfluxDB Version: " + influxDB.version() + " #"); System.out.println("##################################################################################"); return influxDB; } From c640568ec34f729d5baf02ec8b70a2717fb5e402 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Wed, 10 Jan 2018 19:30:46 +0100 Subject: [PATCH 111/745] various maven plugin and dependency updates --- pom.xml | 14 +++++++------- 1 file changed, 7 
insertions(+), 7 deletions(-) diff --git a/pom.xml b/pom.xml index bd92d18d7..a791f661e 100644 --- a/pom.xml +++ b/pom.xml @@ -84,7 +84,7 @@ org.apache.maven.plugins maven-site-plugin - 3.6 + 3.7 org.apache.maven.plugins @@ -109,7 +109,7 @@ org.apache.maven.plugins maven-resources-plugin - 3.0.1 + 3.0.2 @@ -143,7 +143,7 @@ org.apache.maven.plugins maven-javadoc-plugin - 3.0.0-M1 + 3.0.0 attach-javadocs @@ -172,7 +172,7 @@ org.jacoco jacoco-maven-plugin - 0.7.9 + 0.8.0 @@ -191,7 +191,7 @@ org.apache.maven.plugins maven-checkstyle-plugin - 2.17 + 3.0.0 true checkstyle.xml @@ -230,13 +230,13 @@ org.assertj assertj-core - 3.8.0 + 3.9.0 test org.mockito mockito-core - 2.12.0 + 2.13.0 test From a10840b59cd56caedb63eae843e6e38125db300c Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Wed, 10 Jan 2018 19:36:02 +0100 Subject: [PATCH 112/745] fix one small javadoc nit --- src/main/java/org/influxdb/dto/Pong.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/org/influxdb/dto/Pong.java b/src/main/java/org/influxdb/dto/Pong.java index 278633ce1..4aa041e41 100644 --- a/src/main/java/org/influxdb/dto/Pong.java +++ b/src/main/java/org/influxdb/dto/Pong.java @@ -28,6 +28,8 @@ public void setVersion(final String version) { /** * Good or bad connection status. + * + * @return true if the version of influxdb is not unknown. 
*/ public boolean isGood() { return !UNKNOWN_VERSION.equalsIgnoreCase(version); From 00367da5534f4b1f68e42190c2fea7fd4e0c8230 Mon Sep 17 00:00:00 2001 From: dubsky Date: Fri, 12 Jan 2018 14:29:57 +0100 Subject: [PATCH 113/745] adding test for retry writer --- src/main/java/org/influxdb/BatchOptions.java | 2 +- src/main/java/org/influxdb/impl/BatchOptionsImpl.java | 2 +- .../java/org/influxdb/impl/RetryCapableBatchWriter.java | 9 ++++++++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/influxdb/BatchOptions.java b/src/main/java/org/influxdb/BatchOptions.java index 1d7e9c727..7c611ea82 100644 --- a/src/main/java/org/influxdb/BatchOptions.java +++ b/src/main/java/org/influxdb/BatchOptions.java @@ -65,7 +65,7 @@ public interface BatchOptions { * to treat a write as a success) * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions setConsistency(final InfluxDB.ConsistencyLevel consistency); + BatchOptions consistency(final InfluxDB.ConsistencyLevel consistency); /** diff --git a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java index 5edbc995f..21cc2e757 100644 --- a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java +++ b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java @@ -67,7 +67,7 @@ public BatchOptions exceptionHandler(final BiConsumer, Throwable return clone; } - public BatchOptions setConsistency(final InfluxDB.ConsistencyLevel consistency) { + public BatchOptions consistency(final InfluxDB.ConsistencyLevel consistency) { BatchOptionsImpl clone = getClone(); clone.consistency = consistency; return clone; diff --git a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java index bf3723374..49be0ecfd 100644 --- a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java +++ b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java @@ -71,7 +71,14 @@ 
public void write(final BatchPoints batchPoints) { || result.outcome == WriteResultOutcome.FAILED_RETRY_IMPOSSIBLE) { iterator.remove(); usedRetryBufferCapacity -= entry.getPoints().size(); - exceptionHandler.accept(entry.getPoints(), result.throwable); + // we are throwing out data, notify the client + if (result.outcome == WriteResultOutcome.FAILED_RETRY_IMPOSSIBLE) + exceptionHandler.accept(entry.getPoints(), result.throwable); + } else { + // we cannot send more data otherwise we would write them in different + // order than in which were submitted + addToBatchQueue(batchPoints); + return; } } // write the last given batch last so that duplicate data points get overwritten correctly From 59a787b8c45fb913060cd67d754b79524e3969d8 Mon Sep 17 00:00:00 2001 From: dubsky Date: Fri, 12 Jan 2018 14:54:38 +0100 Subject: [PATCH 114/745] adding test for retry writer --- .../java/org/influxdb/BatchOptionsTest.java | 29 ++++++- src/test/java/org/influxdb/TestUtils.java | 17 +++- .../impl/RetryCapableBatchWriterTest.java | 86 +++++++++++++++++++ 3 files changed, 129 insertions(+), 3 deletions(-) create mode 100644 src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index f58a685af..40a93398f 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -10,8 +10,10 @@ import org.junit.runner.RunWith; import java.io.IOException; +import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; @RunWith(JUnitPlatform.class) public class BatchOptionsTest { @@ -37,6 +39,31 @@ public void testBatchEnabledWithDefaultSettings() { } } + @Test + public void testParametersSet() { + BatchOptions options = BatchOptions.DEFAULTS.actions(3); + Assertions.assertEquals(3, options.getActions()); + 
options=options.consistency(InfluxDB.ConsistencyLevel.ANY); + Assertions.assertEquals(InfluxDB.ConsistencyLevel.ANY, options.getConsistency()); + options=options.flushDuration(1001); + Assertions.assertEquals(1001, options.getFlushDuration()); + options=options.bufferLimit(7070); + Assertions.assertEquals(7070, options.getBufferLimit()); + options=options.jitterDuration(104); + Assertions.assertEquals(104, options.getJitterDuration()); + BiConsumer, Throwable> handler=new BiConsumer, Throwable>() { + @Override + public void accept(Iterable points, Throwable throwable) { + + } + }; + options=options.exceptionHandler(handler); + Assertions.assertEquals(handler, options.getExceptionHandler()); + ThreadFactory tf=Executors.defaultThreadFactory(); + options=options.threadFactory(tf); + Assertions.assertEquals(tf, options.getThreadFactory()); + } + /** * Test the implementation of {@link BatchOptions#actions(int)} }. */ @@ -45,7 +72,7 @@ public void testActionsSetting() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { BatchOptions options = BatchOptions.DEFAULTS.actions(3); - Assertions.assertEquals(3, options.getActions()); + this.influxDB.enableBatch(options); this.influxDB.createDatabase(dbName); this.influxDB.setDatabase(dbName); diff --git a/src/test/java/org/influxdb/TestUtils.java b/src/test/java/org/influxdb/TestUtils.java index e85f6feaf..ee225a05b 100644 --- a/src/test/java/org/influxdb/TestUtils.java +++ b/src/test/java/org/influxdb/TestUtils.java @@ -1,5 +1,6 @@ package org.influxdb; +import okhttp3.OkHttpClient; import org.influxdb.dto.Pong; import java.io.IOException; @@ -47,8 +48,20 @@ public static String defaultRetentionPolicy(String version) { } } - public static InfluxDB connectToInfluxDB() throws InterruptedException, IOException { - InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); + public static InfluxDB 
connectToInfluxDB() throws InterruptedException, IOException { + return connectToInfluxDB(null); + } + + public static InfluxDB connectToInfluxDB( final OkHttpClient.Builder client) throws InterruptedException, IOException { + OkHttpClient.Builder clientToUse; + if (client == null) { + clientToUse = new OkHttpClient.Builder(); + } else { + clientToUse = client; + } + InfluxDB influxDB = InfluxDBFactory.connect( + "http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), + "admin", "admin", clientToUse); boolean influxDBstarted = false; do { Pong response; diff --git a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java new file mode 100644 index 000000000..9ebf638a3 --- /dev/null +++ b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java @@ -0,0 +1,86 @@ +package org.influxdb.impl; + +import org.influxdb.InfluxDB; +import org.influxdb.InfluxDBException; +import org.influxdb.dto.BatchPoints; +import org.influxdb.dto.Point; +import org.junit.Assert; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; + +import java.util.List; +import java.util.function.BiConsumer; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.*; + +@RunWith(JUnitPlatform.class) +public class RetryCapableBatchWriterTest { + + BatchPoints getBP(int count) { + BatchPoints.Builder b = BatchPoints.database("d1"); + for (int i = 0; i < count; i++) { + b.point(Point.measurement("x1").addField("x", 1).build()).build(); + } + return b.build(); + } + + @Test + public void test() { + InfluxDB mockInfluxDB = mock(InfluxDBImpl.class); + BiConsumer errorHandler = mock(BiConsumer.class); + RetryCapableBatchWriter rw = new RetryCapableBatchWriter(mockInfluxDB, errorHandler, + 150, 100); + BatchPoints bp0 = getBP(5); + BatchPoints bp1 = getBP(90); + 
BatchPoints bp2 = getBP(90); + BatchPoints bp3 = getBP(8); + BatchPoints bp4 = getBP(100); + + Exception nonRecoverable = InfluxDBException.buildExceptionForErrorState("database not found: cvfdgf"); + Exception recoverable = InfluxDBException.buildExceptionForErrorState("cache-max-memory-size exceeded 104/1400"); + Mockito.doThrow(nonRecoverable).when(mockInfluxDB).write(bp0); + Mockito.doThrow(recoverable).when(mockInfluxDB).write(bp1); + Mockito.doThrow(recoverable).when(mockInfluxDB).write(bp2); + Mockito.doThrow(recoverable).when(mockInfluxDB).write(bp3); + // first one will fail with non-recoverable error + rw.write(bp0); + // second one will fail with recoverable error + rw.write(bp1); + // will fail with recoverable error again, will remove data due to buffer limit + rw.write(bp2); + // will write fail with recoverable error + rw.write(bp3); + + ArgumentCaptor captor = ArgumentCaptor.forClass(BatchPoints.class); + verify(mockInfluxDB, times(4)).write(captor.capture()); + final List capturedArgument1 = captor.getAllValues(); + for (BatchPoints b : capturedArgument1) { + System.out.println("batchSize written " + b.getPoints().size()); + } + + Assert.assertEquals(capturedArgument1.get(0).getPoints().size(), 5); + Assert.assertEquals(capturedArgument1.get(1).getPoints().size(), 90); + Assert.assertEquals(capturedArgument1.get(2).getPoints().size(), 90); + Assert.assertEquals(capturedArgument1.get(3).getPoints().size(), 98); + + verify(errorHandler, times(1)).accept(any(),any()); + + // will write data that previously were not sent, will send additional data + Mockito.reset(mockInfluxDB); + rw.write(bp4); + + ArgumentCaptor captor2 = ArgumentCaptor.forClass(BatchPoints.class); + verify(mockInfluxDB, times(2)).write(captor2.capture()); + final List capturedArgument2 = captor2.getAllValues(); + for (BatchPoints b : capturedArgument2) { + System.out.println("batchSize written " + b.getPoints().size()); + } + 
Assert.assertEquals(capturedArgument2.get(0).getPoints().size(), 98); + Assert.assertEquals(capturedArgument2.get(1).getPoints().size(), 100); + + } +} From aa74fc4d68965dc4f23595f0d10b442fcdb3a1a2 Mon Sep 17 00:00:00 2001 From: dubsky Date: Fri, 12 Jan 2018 17:00:23 +0100 Subject: [PATCH 115/745] implementing code review --- src/main/java/org/influxdb/BatchOptions.java | 105 ++++++++++++++---- .../org/influxdb/impl/BatchOptionsImpl.java | 99 ----------------- 2 files changed, 85 insertions(+), 119 deletions(-) delete mode 100644 src/main/java/org/influxdb/impl/BatchOptionsImpl.java diff --git a/src/main/java/org/influxdb/BatchOptions.java b/src/main/java/org/influxdb/BatchOptions.java index 26e654f24..e9e8c87fc 100644 --- a/src/main/java/org/influxdb/BatchOptions.java +++ b/src/main/java/org/influxdb/BatchOptions.java @@ -1,8 +1,8 @@ package org.influxdb; import org.influxdb.dto.Point; -import org.influxdb.impl.BatchOptionsImpl; +import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.function.BiConsumer; @@ -10,21 +10,51 @@ * BatchOptions are used to configure batching of individual data point writes * into InfluxDB. See {@link InfluxDB#enableBatch(BatchOptions)} */ -public interface BatchOptions { +public final class BatchOptions implements Cloneable { - BatchOptions DEFAULTS = BatchOptionsImpl.DEFAULTS; + /** + * Default batch options. This class is immutable, each configuration + * is built by taking the DEFAULTS and setting specific configuration + * properties. 
+ */ + public static final BatchOptions DEFAULTS = new BatchOptions(); + + // default values here are consistent with Telegraf + public static final int DEFAULT_BATCH_ACTIONS_LIMIT = 1000; + public static final int DEFAULT_BATCH_INTERVAL_DURATION = 1000; + public static final int DEFAULT_JITTER_INTERVAL_DURATION = 0; + + private int actions = DEFAULT_BATCH_ACTIONS_LIMIT; + private int flushDuration = DEFAULT_BATCH_INTERVAL_DURATION; + private int jitterDuration = DEFAULT_JITTER_INTERVAL_DURATION; + + private ThreadFactory threadFactory = Executors.defaultThreadFactory(); + BiConsumer, Throwable> exceptionHandler = (points, throwable) -> { + }; + private InfluxDB.ConsistencyLevel consistency = InfluxDB.ConsistencyLevel.ONE; + + private BatchOptions() { + } /** * @param actions the number of actions to collect * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions actions(final int actions); + public BatchOptions actions(final int actions) { + BatchOptions clone = getClone(); + clone.actions = actions; + return clone; + } /** * @param flushDuration the time to wait at most (milliseconds). * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions flushDuration(final int flushDuration); + public BatchOptions flushDuration(final int flushDuration) { + BatchOptions clone = getClone(); + clone.flushDuration = flushDuration; + return clone; + } /** * Jitters the batch flush interval by a random amount. This is primarily to avoid @@ -34,57 +64,92 @@ public interface BatchOptions { * @param jitterDuration (milliseconds) * @return the BatchOptions instance to be able to use it in a fluent manner. 
*/ - BatchOptions jitterDuration(final int jitterDuration); + public BatchOptions jitterDuration(final int jitterDuration) { + BatchOptions clone = getClone(); + clone.jitterDuration = jitterDuration; + return clone; + } /** * @param threadFactory a ThreadFactory instance to be used * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions threadFactory(final ThreadFactory threadFactory); + public BatchOptions threadFactory(final ThreadFactory threadFactory) { + BatchOptions clone = getClone(); + clone.threadFactory = threadFactory; + return clone; + } /** * @param exceptionHandler a consumer function to handle asynchronous errors * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions exceptionHandler(final BiConsumer, Throwable> exceptionHandler); + public BatchOptions exceptionHandler(final BiConsumer, Throwable> exceptionHandler) { + BatchOptions clone = getClone(); + clone.exceptionHandler = exceptionHandler; + return clone; + } /** * @param consistency cluster consistency setting (how many nodes have to store data points - * to treat a write as a success) + * to treat a write as a success) * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions setConsistency(final InfluxDB.ConsistencyLevel consistency); - + public BatchOptions setConsistency(final InfluxDB.ConsistencyLevel consistency) { + BatchOptions clone = getClone(); + clone.consistency = consistency; + return clone; + } - /** - * @return actions the number of actions to collect - */ - int getActions(); + /** + * @return actions the number of actions to collect + */ + public int getActions() { + return actions; + } /** * @return flushDuration the time to wait at most (milliseconds). 
*/ - int getFlushDuration(); + public int getFlushDuration() { + return flushDuration; + } /** * @return batch flush interval jitter value (milliseconds) */ - int getJitterDuration(); + public int getJitterDuration() { + return jitterDuration; + } /** * @return a ThreadFactory instance to be used */ - ThreadFactory getThreadFactory(); + public ThreadFactory getThreadFactory() { + return threadFactory; + } /** * @return a consumer function to handle asynchronous errors */ - BiConsumer, Throwable> getExceptionHandler(); + public BiConsumer, Throwable> getExceptionHandler() { + return exceptionHandler; + } /** * @return cluster consistency setting (how many nodes have to store data points * to treat a write as a success) */ - InfluxDB.ConsistencyLevel getConsistency(); + public InfluxDB.ConsistencyLevel getConsistency() { + return consistency; + } + + private BatchOptions getClone() { + try { + return (BatchOptions) this.clone(); + } catch (CloneNotSupportedException e) { + throw new RuntimeException(e); + } + } } diff --git a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java deleted file mode 100644 index 2fd8bba8d..000000000 --- a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java +++ /dev/null @@ -1,99 +0,0 @@ -package org.influxdb.impl; - -import org.influxdb.BatchOptions; -import org.influxdb.InfluxDB; -import org.influxdb.dto.Point; - -import java.util.concurrent.Executors; -import java.util.concurrent.ThreadFactory; -import java.util.function.BiConsumer; - -public final class BatchOptionsImpl implements BatchOptions, Cloneable { - - public static final BatchOptions DEFAULTS = new BatchOptionsImpl(); - - // default values here are consistent with Telegraf - public static final int DEFAULT_BATCH_ACTIONS_LIMIT = 1000; - public static final int DEFAULT_BATCH_INTERVAL_DURATION = 1000; - public static final int DEFAULT_JITTER_INTERVAL_DURATION = 0; - - int actions = DEFAULT_BATCH_ACTIONS_LIMIT; - int 
flushDuration = DEFAULT_BATCH_INTERVAL_DURATION; - int jitterDuration = DEFAULT_JITTER_INTERVAL_DURATION; - - ThreadFactory threadFactory = Executors.defaultThreadFactory(); - BiConsumer, Throwable> exceptionHandler = (points, throwable) -> { - }; - InfluxDB.ConsistencyLevel consistency = InfluxDB.ConsistencyLevel.ONE; - - private BatchOptionsImpl() { - } - - public BatchOptions actions(final int actions) { - BatchOptionsImpl clone = getClone(); - clone.actions = actions; - return clone; - } - - public BatchOptions flushDuration(final int flushDuration) { - BatchOptionsImpl clone = getClone(); - clone.flushDuration = flushDuration; - return clone; - } - - public BatchOptions jitterDuration(final int jitterDuration) { - BatchOptionsImpl clone = getClone(); - clone.jitterDuration = jitterDuration; - return clone; - } - - public BatchOptions threadFactory(final ThreadFactory threadFactory) { - BatchOptionsImpl clone = getClone(); - clone.threadFactory = threadFactory; - return clone; - } - - public BatchOptions exceptionHandler(final BiConsumer, Throwable> exceptionHandler) { - BatchOptionsImpl clone = getClone(); - clone.exceptionHandler = exceptionHandler; - return clone; - } - - public BatchOptions setConsistency(final InfluxDB.ConsistencyLevel consistency) { - BatchOptionsImpl clone = getClone(); - clone.consistency = consistency; - return clone; - } - - private BatchOptionsImpl getClone() { - try { - return (BatchOptionsImpl) this.clone(); - } catch (CloneNotSupportedException e) { - throw new RuntimeException(e); - } - } - - public int getActions() { - return actions; - } - - public int getFlushDuration() { - return flushDuration; - } - - public int getJitterDuration() { - return jitterDuration; - } - - public InfluxDB.ConsistencyLevel getConsistency() { - return consistency; - } - - public ThreadFactory getThreadFactory() { - return threadFactory; - } - - public BiConsumer, Throwable> getExceptionHandler() { - return exceptionHandler; - } -} From 
a3eae959c6837a21ff4306cf40a37f39f5aab6f6 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Wed, 17 Jan 2018 16:25:06 +0700 Subject: [PATCH 116/745] add test for flushDuration, jitterDuration, bufferLimit --- .../java/org/influxdb/BatchOptionsTest.java | 154 +++++++++++++++++- 1 file changed, 151 insertions(+), 3 deletions(-) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 40a93398f..2ed53b036 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -6,6 +6,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.function.Executable; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; @@ -28,7 +29,7 @@ public void setUp() throws InterruptedException, IOException { /** * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. */ - @Test + //@Test public void testBatchEnabledWithDefaultSettings() { try { this.influxDB.enableBatch(); @@ -39,7 +40,7 @@ public void testBatchEnabledWithDefaultSettings() { } } - @Test + //@Test public void testParametersSet() { BatchOptions options = BatchOptions.DEFAULTS.actions(3); Assertions.assertEquals(3, options.getActions()); @@ -67,7 +68,7 @@ public void accept(Iterable points, Throwable throwable) { /** * Test the implementation of {@link BatchOptions#actions(int)} }. */ - @Test + //@Test public void testActionsSetting() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { @@ -94,4 +95,151 @@ public void testActionsSetting() throws InterruptedException { } } + /** + * Test the implementation of {@link BatchOptions#flushDuration(int)} }. 
+ * @throws InterruptedException + */ + //@Test + public void testFlushDuration() throws InterruptedException { + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BatchOptions options = BatchOptions.DEFAULTS.flushDuration(10000); + + this.influxDB.createDatabase(dbName); + this.influxDB.setDatabase(dbName); + this.influxDB.enableBatch(options); + for (int j = 0; j < 20; j++) { + Point point = Point.measurement("weather") + .time(j,TimeUnit.HOURS) + .addField("temperature", (double) j) + .addField("humidity", (double) (j) * 1.1) + .addField("uv_index", "moderate").build(); + this.influxDB.write(point); + } + + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertNull(result.getResults().get(0).getSeries()); + Assertions.assertNull(result.getResults().get(0).getError()); + + Thread.sleep(12000); + result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + } + finally { + this.influxDB.disableBatch(); + this.influxDB.deleteDatabase(dbName); + } + } + + /** + * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. 
+ * @throws InterruptedException + */ + //@Test + public void testJitterDuration() throws InterruptedException { + + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BatchOptions options = BatchOptions.DEFAULTS.flushDuration(1000).jitterDuration(125); + + this.influxDB.createDatabase(dbName); + this.influxDB.setDatabase(dbName); + this.influxDB.enableBatch(options); + for (int j = 0; j < 20; j++) { + Point point = Point.measurement("weather") + .time(j,TimeUnit.HOURS) + .addField("temperature", (double) j) + .addField("humidity", (double) (j) * 1.1) + .addField("uv_index", "moderate").build(); + this.influxDB.write(point); + } + + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertNull(result.getResults().get(0).getSeries()); + Assertions.assertNull(result.getResults().get(0).getError()); + + Thread.sleep(1125); + result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + } + finally { + this.influxDB.disableBatch(); + this.influxDB.deleteDatabase(dbName); + } + + + } + + /** + * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. + */ + //@Test + public void testNegativeJitterDuration() { + + Assertions.assertThrows(IllegalArgumentException.class, () -> { + BatchOptions options = BatchOptions.DEFAULTS.jitterDuration(-10); + influxDB.enableBatch(options); + + influxDB.disableBatch(); + options = BatchOptions.DEFAULTS.jitterDuration(0); + influxDB.enableBatch(); + influxDB.disableBatch(); + }); + } + + /** + * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. 
+ */ + @Test + public void testBufferLimit() throws InterruptedException { + + int[][] bufferLimit2Actions = {{10, 4}, {3, 4}}; + + for (int[] bufferLimit2Action : bufferLimit2Actions) { + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(bufferLimit2Action[0]).actions(bufferLimit2Action[1]); + + this.influxDB.createDatabase(dbName); + this.influxDB.setDatabase(dbName); + this.influxDB.enableBatch(options); + for (int j = 0; j < 10; j++) { + Point point = Point.measurement("weather") + .time(j,TimeUnit.HOURS) + .addField("temperature", (double) j) + .addField("humidity", (double) (j) * 1.1) + .addField("uv_index", "moderate").build(); + this.influxDB.write(point); + } + + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertEquals(8, result.getResults().get(0).getSeries().get(0).getValues().size()); + Thread.sleep(1000); + result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertEquals(10, result.getResults().get(0).getSeries().get(0).getValues().size()); + } + finally { + this.influxDB.disableBatch(); + this.influxDB.deleteDatabase(dbName); + } + } + + } + + /** + * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. 
+ */ + //@Test + public void testNegativeBufferLimit() { + + Assertions.assertThrows(IllegalArgumentException.class, () -> { + BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(-10); + influxDB.enableBatch(options); + + influxDB.disableBatch(); + options = BatchOptions.DEFAULTS.bufferLimit(0); + influxDB.enableBatch(); + influxDB.disableBatch(); + }); + } } From 9e6809e9a6f24e8df26bde32eb47e2d92e15afb0 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Thu, 18 Jan 2018 10:46:38 +0700 Subject: [PATCH 117/745] fixing checkstyle --- src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java index 49be0ecfd..6871dfc54 100644 --- a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java +++ b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java @@ -72,8 +72,9 @@ public void write(final BatchPoints batchPoints) { iterator.remove(); usedRetryBufferCapacity -= entry.getPoints().size(); // we are throwing out data, notify the client - if (result.outcome == WriteResultOutcome.FAILED_RETRY_IMPOSSIBLE) + if (result.outcome == WriteResultOutcome.FAILED_RETRY_IMPOSSIBLE) { exceptionHandler.accept(entry.getPoints(), result.throwable); + } } else { // we cannot send more data otherwise we would write them in different // order than in which were submitted From 7e6741bf6a71af3f8c9c61ba4e3826e83a165c3b Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 19 Jan 2018 13:56:50 +0700 Subject: [PATCH 118/745] add test for threadFactory, exceptionHandler, consistency --- .../java/org/influxdb/BatchOptionsTest.java | 286 +++++++++++++----- 1 file changed, 218 insertions(+), 68 deletions(-) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 2ed53b036..d452459aa 100644 --- 
a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -1,21 +1,27 @@ package org.influxdb; +import org.influxdb.InfluxDB.ConsistencyLevel; +import org.influxdb.InfluxDBException.DatabaseNotFoundError; +import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.function.Executable; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; +import static org.mockito.Mockito.*; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; + @RunWith(JUnitPlatform.class) public class BatchOptionsTest { @@ -29,7 +35,7 @@ public void setUp() throws InterruptedException, IOException { /** * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. */ - //@Test + @Test public void testBatchEnabledWithDefaultSettings() { try { this.influxDB.enableBatch(); @@ -40,7 +46,7 @@ public void testBatchEnabledWithDefaultSettings() { } } - //@Test + @Test public void testParametersSet() { BatchOptions options = BatchOptions.DEFAULTS.actions(3); Assertions.assertEquals(3, options.getActions()); @@ -68,7 +74,7 @@ public void accept(Iterable points, Throwable throwable) { /** * Test the implementation of {@link BatchOptions#actions(int)} }. */ - //@Test + @Test public void testActionsSetting() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { @@ -99,29 +105,21 @@ public void testActionsSetting() throws InterruptedException { * Test the implementation of {@link BatchOptions#flushDuration(int)} }. 
* @throws InterruptedException */ - //@Test + @Test public void testFlushDuration() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { BatchOptions options = BatchOptions.DEFAULTS.flushDuration(10000); - - this.influxDB.createDatabase(dbName); - this.influxDB.setDatabase(dbName); - this.influxDB.enableBatch(options); - for (int j = 0; j < 20; j++) { - Point point = Point.measurement("weather") - .time(j,TimeUnit.HOURS) - .addField("temperature", (double) j) - .addField("humidity", (double) (j) * 1.1) - .addField("uv_index", "moderate").build(); - this.influxDB.write(point); - } + influxDB.createDatabase(dbName); + influxDB.setDatabase(dbName); + influxDB.enableBatch(options); + write20Points(influxDB); QueryResult result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertNull(result.getResults().get(0).getSeries()); Assertions.assertNull(result.getResults().get(0).getError()); - Thread.sleep(12000); + Thread.sleep(10000); result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); } @@ -135,24 +133,17 @@ public void testFlushDuration() throws InterruptedException { * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. 
* @throws InterruptedException */ - //@Test + @Test public void testJitterDuration() throws InterruptedException { + String dbName = "write_unittest_" + System.currentTimeMillis(); try { BatchOptions options = BatchOptions.DEFAULTS.flushDuration(1000).jitterDuration(125); - - this.influxDB.createDatabase(dbName); - this.influxDB.setDatabase(dbName); - this.influxDB.enableBatch(options); - for (int j = 0; j < 20; j++) { - Point point = Point.measurement("weather") - .time(j,TimeUnit.HOURS) - .addField("temperature", (double) j) - .addField("humidity", (double) (j) * 1.1) - .addField("uv_index", "moderate").build(); - this.influxDB.write(point); - } + influxDB.createDatabase(dbName); + influxDB.setDatabase(dbName); + influxDB.enableBatch(options); + write20Points(influxDB); QueryResult result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertNull(result.getResults().get(0).getSeries()); @@ -163,8 +154,8 @@ public void testJitterDuration() throws InterruptedException { Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); } finally { - this.influxDB.disableBatch(); - this.influxDB.deleteDatabase(dbName); + influxDB.disableBatch(); + influxDB.deleteDatabase(dbName); } @@ -173,7 +164,7 @@ public void testJitterDuration() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. 
*/ - //@Test + @Test public void testNegativeJitterDuration() { Assertions.assertThrows(IllegalArgumentException.class, () -> { @@ -187,49 +178,52 @@ public void testNegativeJitterDuration() { }); } + + private void doTestBufferLimit(int bufferLimit, int actions) throws InterruptedException { + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(bufferLimit).actions(actions); + + influxDB.createDatabase(dbName); + influxDB.setDatabase(dbName); + influxDB.enableBatch(options); + write20Points(influxDB); + + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + Thread.sleep(1000); + result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + } + finally { + influxDB.disableBatch(); + influxDB.deleteDatabase(dbName); + } + } + + /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. 
*/ @Test - public void testBufferLimit() throws InterruptedException { - - int[][] bufferLimit2Actions = {{10, 4}, {3, 4}}; - - for (int[] bufferLimit2Action : bufferLimit2Actions) { - String dbName = "write_unittest_" + System.currentTimeMillis(); - try { - BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(bufferLimit2Action[0]).actions(bufferLimit2Action[1]); - - this.influxDB.createDatabase(dbName); - this.influxDB.setDatabase(dbName); - this.influxDB.enableBatch(options); - for (int j = 0; j < 10; j++) { - Point point = Point.measurement("weather") - .time(j,TimeUnit.HOURS) - .addField("temperature", (double) j) - .addField("humidity", (double) (j) * 1.1) - .addField("uv_index", "moderate").build(); - this.influxDB.write(point); - } - - QueryResult result = influxDB.query(new Query("select * from weather", dbName)); - Assertions.assertEquals(8, result.getResults().get(0).getSeries().get(0).getValues().size()); - Thread.sleep(1000); - result = influxDB.query(new Query("select * from weather", dbName)); - Assertions.assertEquals(10, result.getResults().get(0).getSeries().get(0).getValues().size()); - } - finally { - this.influxDB.disableBatch(); - this.influxDB.deleteDatabase(dbName); - } - } + public void testBufferLimit1() throws InterruptedException { + + doTestBufferLimit(3, 4); + + } + + /** + * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. + */ + @Test + public void testBufferLimit2() throws InterruptedException { + + doTestBufferLimit(10, 4); } - /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. */ - //@Test + @Test public void testNegativeBufferLimit() { Assertions.assertThrows(IllegalArgumentException.class, () -> { @@ -242,4 +236,160 @@ public void testNegativeBufferLimit() { influxDB.disableBatch(); }); } + + /** + * Test the implementation of {@link BatchOptions#threadFactory(ThreadFactory)} }. 
+ * @throws InterruptedException + */ + @Test + public void testThreadFactory() throws InterruptedException { + + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BatchOptions options = BatchOptions.DEFAULTS.threadFactory((r) -> { + return new Thread(r); + }); + + influxDB.createDatabase(dbName); + influxDB.setDatabase(dbName); + influxDB.enableBatch(options); + write20Points(influxDB); + + Thread.sleep(3000); + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + } finally { + this.influxDB.disableBatch(); + this.influxDB.deleteDatabase(dbName); + } + + } + + /** + * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. + * @throws InterruptedException + */ + @Test + public void testHandlerOnRetryImpossible() throws InterruptedException { + + String dbName = "write_unittest_" + System.currentTimeMillis(); + InfluxDB spy = spy(influxDB); + doThrow(DatabaseNotFoundError.class).when(spy).write(any(BatchPoints.class)); + + try { + BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); + BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler); + + spy.createDatabase(dbName); + spy.setDatabase(dbName); + spy.enableBatch(options); + + writeSomePoints(spy, 1); + + Thread.sleep(1000); + verify(mockHandler, times(1)).accept(any(), any()); + + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertNull(result.getResults().get(0).getSeries()); + Assertions.assertNull(result.getResults().get(0).getError()); + } finally { + spy.disableBatch(); + spy.deleteDatabase(dbName); + } + + } + + /** + * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. 
+ * @throws InterruptedException + */ + @Test + public void testHandlerOnRetryPossible() throws InterruptedException { + + String dbName = "write_unittest_" + System.currentTimeMillis(); + InfluxDB spy = spy(influxDB); + final Map map = new HashMap<>(1); + map.put("firstCall", true); + doAnswer((invocation) -> { + if (map.get("firstCall")) { + map.put("firstCall", false); + throw new InfluxDBException("error"); + } else { + return invocation.callRealMethod(); + } + + }).when(spy).write(any(BatchPoints.class)); + + try { + BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); + BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler); + + spy.createDatabase(dbName); + spy.setDatabase(dbName); + spy.enableBatch(options); + + writeSomePoints(spy, 1); + + Thread.sleep(5000); + verify(mockHandler, never()).accept(any(), any()); + + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertNotNull(result.getResults().get(0).getSeries()); + Assertions.assertEquals(1, result.getResults().get(0).getSeries().get(0).getValues().size()); + + } finally { + spy.disableBatch(); + spy.deleteDatabase(dbName); + } + + } + + /** + * Test the implementation of {@link BatchOptions#consistency(InfluxDB.ConsistencyLevel)} }. 
+ * @throws InterruptedException + */ + @Test + public void testConsistency() throws InterruptedException { + String dbName = "write_unittest_" + System.currentTimeMillis(); + influxDB.createDatabase(dbName); + influxDB.setDatabase(dbName); + try { + int n = 5; + for (ConsistencyLevel consistencyLevel : ConsistencyLevel.values()) { + BatchOptions options = BatchOptions.DEFAULTS.consistency(consistencyLevel); + + influxDB.enableBatch(options); + writeSomePoints(influxDB, n); + + Thread.sleep(2000); + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertEquals(n, result.getResults().get(0).getSeries().get(0).getValues().size()); + + n += 5; + this.influxDB.disableBatch(); + } + + } finally { + this.influxDB.deleteDatabase(dbName); + } + } + + private void writeSomePoints(InfluxDB influxDB, int firstIndex, int lastIndex) { + for (int i = firstIndex; i <= lastIndex; i++) { + Point point = Point.measurement("weather") + .time(i,TimeUnit.HOURS) + .addField("temperature", (double) i) + .addField("humidity", (double) (i) * 1.1) + .addField("uv_index", "moderate").build(); + influxDB.write(point); + } + } + + private void write20Points(InfluxDB influxDB) { + writeSomePoints(influxDB, 0, 19); + } + + private void writeSomePoints(InfluxDB influxDB, int n) { + writeSomePoints(influxDB, 0, n - 1); + } } From 7b92104c7950e94ba12e40761e0161a1bee84d6a Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 19 Jan 2018 14:15:12 +0700 Subject: [PATCH 119/745] fix test for flushDuration --- .../java/org/influxdb/BatchOptionsTest.java | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index d452459aa..ab63141e1 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -35,7 +35,7 @@ public void setUp() throws InterruptedException, IOException { 
/** * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. */ - @Test + //@Test public void testBatchEnabledWithDefaultSettings() { try { this.influxDB.enableBatch(); @@ -46,7 +46,7 @@ public void testBatchEnabledWithDefaultSettings() { } } - @Test + //@Test public void testParametersSet() { BatchOptions options = BatchOptions.DEFAULTS.actions(3); Assertions.assertEquals(3, options.getActions()); @@ -74,7 +74,7 @@ public void accept(Iterable points, Throwable throwable) { /** * Test the implementation of {@link BatchOptions#actions(int)} }. */ - @Test + //@Test public void testActionsSetting() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { @@ -109,7 +109,7 @@ public void testActionsSetting() throws InterruptedException { public void testFlushDuration() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { - BatchOptions options = BatchOptions.DEFAULTS.flushDuration(10000); + BatchOptions options = BatchOptions.DEFAULTS.flushDuration(500); influxDB.createDatabase(dbName); influxDB.setDatabase(dbName); influxDB.enableBatch(options); @@ -119,7 +119,7 @@ public void testFlushDuration() throws InterruptedException { Assertions.assertNull(result.getResults().get(0).getSeries()); Assertions.assertNull(result.getResults().get(0).getError()); - Thread.sleep(10000); + Thread.sleep(1000); result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); } @@ -133,7 +133,7 @@ public void testFlushDuration() throws InterruptedException { * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. 
* @throws InterruptedException */ - @Test + //@Test public void testJitterDuration() throws InterruptedException { @@ -164,7 +164,7 @@ public void testJitterDuration() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. */ - @Test + //@Test public void testNegativeJitterDuration() { Assertions.assertThrows(IllegalArgumentException.class, () -> { @@ -204,7 +204,7 @@ private void doTestBufferLimit(int bufferLimit, int actions) throws InterruptedE /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. */ - @Test + //@Test public void testBufferLimit1() throws InterruptedException { doTestBufferLimit(3, 4); @@ -214,7 +214,7 @@ public void testBufferLimit1() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. */ - @Test + //@Test public void testBufferLimit2() throws InterruptedException { doTestBufferLimit(10, 4); @@ -223,7 +223,7 @@ public void testBufferLimit2() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. */ - @Test + //@Test public void testNegativeBufferLimit() { Assertions.assertThrows(IllegalArgumentException.class, () -> { @@ -241,7 +241,7 @@ public void testNegativeBufferLimit() { * Test the implementation of {@link BatchOptions#threadFactory(ThreadFactory)} }. * @throws InterruptedException */ - @Test + //@Test public void testThreadFactory() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); @@ -269,7 +269,7 @@ public void testThreadFactory() throws InterruptedException { * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. 
* @throws InterruptedException */ - @Test + //@Test public void testHandlerOnRetryImpossible() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); @@ -303,7 +303,7 @@ public void testHandlerOnRetryImpossible() throws InterruptedException { * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. * @throws InterruptedException */ - @Test + //@Test public void testHandlerOnRetryPossible() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); @@ -348,7 +348,7 @@ public void testHandlerOnRetryPossible() throws InterruptedException { * Test the implementation of {@link BatchOptions#consistency(InfluxDB.ConsistencyLevel)} }. * @throws InterruptedException */ - @Test + //@Test public void testConsistency() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); influxDB.createDatabase(dbName); From 4c668651e89a06b81127a8b243930fc0044906b0 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 19 Jan 2018 14:20:54 +0700 Subject: [PATCH 120/745] uncomment Test annotation --- .../java/org/influxdb/BatchOptionsTest.java | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index ab63141e1..9b6ca94e4 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -35,7 +35,7 @@ public void setUp() throws InterruptedException, IOException { /** * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. 
*/ - //@Test + @Test public void testBatchEnabledWithDefaultSettings() { try { this.influxDB.enableBatch(); @@ -46,7 +46,7 @@ public void testBatchEnabledWithDefaultSettings() { } } - //@Test + @Test public void testParametersSet() { BatchOptions options = BatchOptions.DEFAULTS.actions(3); Assertions.assertEquals(3, options.getActions()); @@ -74,7 +74,7 @@ public void accept(Iterable points, Throwable throwable) { /** * Test the implementation of {@link BatchOptions#actions(int)} }. */ - //@Test + @Test public void testActionsSetting() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { @@ -133,7 +133,7 @@ public void testFlushDuration() throws InterruptedException { * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. * @throws InterruptedException */ - //@Test + @Test public void testJitterDuration() throws InterruptedException { @@ -164,7 +164,7 @@ public void testJitterDuration() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. */ - //@Test + @Test public void testNegativeJitterDuration() { Assertions.assertThrows(IllegalArgumentException.class, () -> { @@ -204,7 +204,7 @@ private void doTestBufferLimit(int bufferLimit, int actions) throws InterruptedE /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. */ - //@Test + @Test public void testBufferLimit1() throws InterruptedException { doTestBufferLimit(3, 4); @@ -214,7 +214,7 @@ public void testBufferLimit1() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. */ - //@Test + @Test public void testBufferLimit2() throws InterruptedException { doTestBufferLimit(10, 4); @@ -223,7 +223,7 @@ public void testBufferLimit2() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. 
*/ - //@Test + @Test public void testNegativeBufferLimit() { Assertions.assertThrows(IllegalArgumentException.class, () -> { @@ -241,7 +241,7 @@ public void testNegativeBufferLimit() { * Test the implementation of {@link BatchOptions#threadFactory(ThreadFactory)} }. * @throws InterruptedException */ - //@Test + @Test public void testThreadFactory() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); @@ -269,7 +269,7 @@ public void testThreadFactory() throws InterruptedException { * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. * @throws InterruptedException */ - //@Test + @Test public void testHandlerOnRetryImpossible() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); @@ -303,7 +303,7 @@ public void testHandlerOnRetryImpossible() throws InterruptedException { * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. * @throws InterruptedException */ - //@Test + @Test public void testHandlerOnRetryPossible() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); @@ -348,7 +348,7 @@ public void testHandlerOnRetryPossible() throws InterruptedException { * Test the implementation of {@link BatchOptions#consistency(InfluxDB.ConsistencyLevel)} }. 
* @throws InterruptedException */ - //@Test + @Test public void testConsistency() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); influxDB.createDatabase(dbName); From b7a2306c3a569c6ea84416d3a4c5d2ab1b0bb117 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 19 Jan 2018 23:12:50 +0700 Subject: [PATCH 121/745] improve test --- .../java/org/influxdb/BatchOptionsTest.java | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 9b6ca94e4..86f720bce 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -11,11 +11,12 @@ import org.junit.jupiter.api.Test; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + import static org.mockito.Mockito.*; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; @@ -308,16 +309,17 @@ public void testHandlerOnRetryPossible() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); InfluxDB spy = spy(influxDB); - final Map map = new HashMap<>(1); - map.put("firstCall", true); - doAnswer((invocation) -> { - if (map.get("firstCall")) { - map.put("firstCall", false); - throw new InfluxDBException("error"); - } else { - return invocation.callRealMethod(); + doAnswer(new Answer() { + boolean firstCall = true; + @Override + public Object answer(InvocationOnMock invocation) throws Throwable { + if (firstCall) { + firstCall = false; + throw new InfluxDBException("error"); + } else { + return invocation.callRealMethod(); + } } - }).when(spy).write(any(BatchPoints.class)); try { @@ -333,6 +335,8 @@ public 
void testHandlerOnRetryPossible() throws InterruptedException { Thread.sleep(5000); verify(mockHandler, never()).accept(any(), any()); + verify(spy, times(2)).write(any(BatchPoints.class)); + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertNotNull(result.getResults().get(0).getSeries()); Assertions.assertEquals(1, result.getResults().get(0).getSeries().get(0).getValues().size()); From ded8b48a164e7e4a5c496d73766e0543fef34f96 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 19 Jan 2018 23:34:36 +0700 Subject: [PATCH 122/745] fix failures of testNegativeJitterDuration, testNegativeBufferLimit, testHandlerOnRetryImpossible --- src/main/java/org/influxdb/impl/BatchProcessor.java | 2 ++ src/main/java/org/influxdb/impl/Preconditions.java | 11 +++++++++++ .../org/influxdb/impl/RetryCapableBatchWriter.java | 2 ++ 3 files changed, 15 insertions(+) diff --git a/src/main/java/org/influxdb/impl/BatchProcessor.java b/src/main/java/org/influxdb/impl/BatchProcessor.java index e1d6a5073..d72449287 100644 --- a/src/main/java/org/influxdb/impl/BatchProcessor.java +++ b/src/main/java/org/influxdb/impl/BatchProcessor.java @@ -170,6 +170,8 @@ public BatchProcessor build() { Objects.requireNonNull(this.influxDB, "influxDB"); Preconditions.checkPositiveNumber(this.actions, "actions"); Preconditions.checkPositiveNumber(this.flushInterval, "flushInterval"); + Preconditions.checkNotNegativeNumber(jitterInterval, "jitterInterval"); + Preconditions.checkNotNegativeNumber(bufferLimit, "bufferLimit"); Objects.requireNonNull(this.flushIntervalUnit, "flushIntervalUnit"); Objects.requireNonNull(this.threadFactory, "threadFactory"); Objects.requireNonNull(this.exceptionHandler, "exceptionHandler"); diff --git a/src/main/java/org/influxdb/impl/Preconditions.java b/src/main/java/org/influxdb/impl/Preconditions.java index 4a3297db6..e636ce1ce 100644 --- a/src/main/java/org/influxdb/impl/Preconditions.java +++ 
b/src/main/java/org/influxdb/impl/Preconditions.java @@ -36,6 +36,17 @@ public static void checkPositiveNumber(final Number number, final String name) t } } + /** + * Enforces that the number is not negative. + * @param number the number to test + * @param name variable name for reporting + * @throws IllegalArgumentException if the number is less or equal to 0 + */ + public static void checkNotNegativeNumber(final Number number, final String name) throws IllegalArgumentException { + if (number == null || number.doubleValue() < 0) { + throw new IllegalArgumentException("Expecting a positive or zero number for " + name); + } + } /** * Enforces that the duration is a valid influxDB duration. * @param duration the duration to test diff --git a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java index 6871dfc54..001a02ef1 100644 --- a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java +++ b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java @@ -86,6 +86,8 @@ public void write(final BatchPoints batchPoints) { WriteResult result = tryToWrite(batchPoints); if (result.outcome == WriteResultOutcome.FAILED_RETRY_POSSIBLE) { addToBatchQueue(batchPoints); + } else { + exceptionHandler.accept(batchPoints.getPoints(), result.throwable); } } From 15ae56c0ac3b27d1ae19b44a2a6d53d41a63d5ce Mon Sep 17 00:00:00 2001 From: dubsky Date: Mon, 22 Jan 2018 21:30:37 +0100 Subject: [PATCH 123/745] fixing automatic BatchWriter retry --- .../org/influxdb/impl/BatchProcessor.java | 7 +-- .../java/org/influxdb/impl/BatchWriter.java | 6 ++- .../org/influxdb/impl/OneShotBatchWriter.java | 8 +++- .../impl/RetryCapableBatchWriter.java | 46 +++++++++++++------ .../impl/RetryCapableBatchWriterTest.java | 15 +++--- 5 files changed, 54 insertions(+), 28 deletions(-) diff --git a/src/main/java/org/influxdb/impl/BatchProcessor.java b/src/main/java/org/influxdb/impl/BatchProcessor.java index d72449287..3f2d7b4b1 
100644 --- a/src/main/java/org/influxdb/impl/BatchProcessor.java +++ b/src/main/java/org/influxdb/impl/BatchProcessor.java @@ -6,6 +6,7 @@ import org.influxdb.dto.Point; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -281,6 +282,7 @@ void write() { List currentBatch = null; try { if (this.queue.isEmpty()) { + BatchProcessor.this.batchWriter.write(Collections.emptyList()); return; } //for batch on HTTP. @@ -316,9 +318,8 @@ void write() { } } - for (BatchPoints batchPoints : batchKeyToBatchPoints.values()) { - BatchProcessor.this.batchWriter.write(batchPoints); - } + BatchProcessor.this.batchWriter.write(batchKeyToBatchPoints.values()); + for (Entry> entry : udpPortToBatchPoints.entrySet()) { for (String lineprotocolStr : entry.getValue()) { BatchProcessor.this.influxDB.write(entry.getKey(), lineprotocolStr); diff --git a/src/main/java/org/influxdb/impl/BatchWriter.java b/src/main/java/org/influxdb/impl/BatchWriter.java index e2a169143..4763010f9 100644 --- a/src/main/java/org/influxdb/impl/BatchWriter.java +++ b/src/main/java/org/influxdb/impl/BatchWriter.java @@ -2,14 +2,16 @@ import org.influxdb.dto.BatchPoints; +import java.util.Collection; + /** * Write individual batches to InfluxDB. */ interface BatchWriter { /** * Write the given batch into InfluxDB. - * @param batchPoints to write + * @param batchPointsCollection to write */ - void write(BatchPoints batchPoints); + void write(Collection batchPointsCollection); } diff --git a/src/main/java/org/influxdb/impl/OneShotBatchWriter.java b/src/main/java/org/influxdb/impl/OneShotBatchWriter.java index c6594f6f5..e981fe627 100644 --- a/src/main/java/org/influxdb/impl/OneShotBatchWriter.java +++ b/src/main/java/org/influxdb/impl/OneShotBatchWriter.java @@ -3,6 +3,8 @@ import org.influxdb.InfluxDB; import org.influxdb.dto.BatchPoints; +import java.util.Collection; + /** * Batch writer that tries to write BatchPoints exactly once. 
*/ @@ -15,7 +17,9 @@ class OneShotBatchWriter implements BatchWriter { } @Override - public void write(final BatchPoints batchPoints) { - influxDB.write(batchPoints); + public void write(final Collection batchPointsCollection) { + for (BatchPoints batchPoints : batchPointsCollection) { + influxDB.write(batchPoints); + } } } diff --git a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java index 001a02ef1..db4e66d4c 100644 --- a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java +++ b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java @@ -5,6 +5,8 @@ import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; +import java.util.Collection; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; @@ -34,23 +36,23 @@ class RetryCapableBatchWriter implements BatchWriter { private enum WriteResultOutcome { WRITTEN, FAILED_RETRY_POSSIBLE, FAILED_RETRY_IMPOSSIBLE } - private static class WriteResult { + private static final class WriteResult { static final WriteResult WRITTEN = new WriteResult(WriteResultOutcome.WRITTEN); WriteResultOutcome outcome; Throwable throwable; - public WriteResult(final WriteResultOutcome outcome) { + private WriteResult(final WriteResultOutcome outcome) { this.outcome = outcome; } - public WriteResult(final WriteResultOutcome outcome, final Throwable throwable) { + private WriteResult(final WriteResultOutcome outcome, final Throwable throwable) { this.outcome = outcome; this.throwable = throwable; } - public WriteResult(final InfluxDBException e) { + private WriteResult(final InfluxDBException e) { this.throwable = e; if (e.isRetryWorth()) { this.outcome = WriteResultOutcome.FAILED_RETRY_POSSIBLE; @@ -61,15 +63,15 @@ public WriteResult(final InfluxDBException e) { } @Override - public void write(final BatchPoints batchPoints) { + public void write(final Collection collection) { // empty the 
cached data first - ListIterator iterator = batchQueue.listIterator(); - while (iterator.hasNext()) { - BatchPoints entry = iterator.next(); + ListIterator batchQueueIterator = batchQueue.listIterator(); + while (batchQueueIterator.hasNext()) { + BatchPoints entry = batchQueueIterator.next(); WriteResult result = tryToWrite(entry); if (result.outcome == WriteResultOutcome.WRITTEN || result.outcome == WriteResultOutcome.FAILED_RETRY_IMPOSSIBLE) { - iterator.remove(); + batchQueueIterator.remove(); usedRetryBufferCapacity -= entry.getPoints().size(); // we are throwing out data, notify the client if (result.outcome == WriteResultOutcome.FAILED_RETRY_IMPOSSIBLE) { @@ -78,16 +80,30 @@ public void write(final BatchPoints batchPoints) { } else { // we cannot send more data otherwise we would write them in different // order than in which were submitted - addToBatchQueue(batchPoints); + for (BatchPoints batchPoints : collection) { + addToBatchQueue(batchPoints); + } return; } } // write the last given batch last so that duplicate data points get overwritten correctly - WriteResult result = tryToWrite(batchPoints); - if (result.outcome == WriteResultOutcome.FAILED_RETRY_POSSIBLE) { - addToBatchQueue(batchPoints); - } else { - exceptionHandler.accept(batchPoints.getPoints(), result.throwable); + Iterator collectionIterator = collection.iterator(); + while (collectionIterator.hasNext()) { + BatchPoints batchPoints = collectionIterator.next(); + WriteResult result = tryToWrite(batchPoints); + switch (result.outcome) { + case FAILED_RETRY_POSSIBLE: + addToBatchQueue(batchPoints); + while (collectionIterator.hasNext()) { + addToBatchQueue(collectionIterator.next()); + } + break; + case FAILED_RETRY_IMPOSSIBLE: + exceptionHandler.accept(batchPoints.getPoints(), result.throwable); + break; + default: + + } } } diff --git a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java index 9ebf638a3..4e635556e 
100644 --- a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java +++ b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java @@ -11,6 +11,8 @@ import org.mockito.ArgumentCaptor; import org.mockito.Mockito; +import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.function.BiConsumer; @@ -47,13 +49,13 @@ public void test() { Mockito.doThrow(recoverable).when(mockInfluxDB).write(bp2); Mockito.doThrow(recoverable).when(mockInfluxDB).write(bp3); // first one will fail with non-recoverable error - rw.write(bp0); + rw.write(Collections.singletonList(bp0)); // second one will fail with recoverable error - rw.write(bp1); + rw.write(Collections.singletonList(bp1)); // will fail with recoverable error again, will remove data due to buffer limit - rw.write(bp2); + rw.write(Collections.singletonList(bp2)); // will write fail with recoverable error - rw.write(bp3); + rw.write(Collections.singletonList(bp3)); ArgumentCaptor captor = ArgumentCaptor.forClass(BatchPoints.class); verify(mockInfluxDB, times(4)).write(captor.capture()); @@ -67,11 +69,12 @@ public void test() { Assert.assertEquals(capturedArgument1.get(2).getPoints().size(), 90); Assert.assertEquals(capturedArgument1.get(3).getPoints().size(), 98); - verify(errorHandler, times(1)).accept(any(),any()); + // error handler called twice; once for first unrecoverable write, se + verify(errorHandler, times(2)).accept(any(),any()); // will write data that previously were not sent, will send additional data Mockito.reset(mockInfluxDB); - rw.write(bp4); + rw.write(Collections.singletonList(bp4)); ArgumentCaptor captor2 = ArgumentCaptor.forClass(BatchPoints.class); verify(mockInfluxDB, times(2)).write(captor2.capture()); From 2cd63ade9b5be0a5b89be3c3b50f4a4cbab1ffc9 Mon Sep 17 00:00:00 2001 From: dubsky Date: Fri, 12 Jan 2018 17:00:23 +0100 Subject: [PATCH 124/745] merging changes from batch-iterval-jittering branch --- 
src/main/java/org/influxdb/BatchOptions.java | 111 ++++++++++++++--- .../org/influxdb/impl/BatchOptionsImpl.java | 112 ------------------ 2 files changed, 92 insertions(+), 131 deletions(-) delete mode 100644 src/main/java/org/influxdb/impl/BatchOptionsImpl.java diff --git a/src/main/java/org/influxdb/BatchOptions.java b/src/main/java/org/influxdb/BatchOptions.java index 7c611ea82..90c1add32 100644 --- a/src/main/java/org/influxdb/BatchOptions.java +++ b/src/main/java/org/influxdb/BatchOptions.java @@ -1,8 +1,8 @@ package org.influxdb; import org.influxdb.dto.Point; -import org.influxdb.impl.BatchOptionsImpl; +import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.function.BiConsumer; @@ -10,21 +10,53 @@ * BatchOptions are used to configure batching of individual data point writes * into InfluxDB. See {@link InfluxDB#enableBatch(BatchOptions)} */ -public interface BatchOptions { +public final class BatchOptions implements Cloneable { - BatchOptions DEFAULTS = BatchOptionsImpl.DEFAULTS; + /** + * Default batch options. This class is immutable, each configuration + * is built by taking the DEFAULTS and setting specific configuration + * properties. 
+ */ + public static final BatchOptions DEFAULTS = new BatchOptions(); + + // default values here are consistent with Telegraf + public static final int DEFAULT_BATCH_ACTIONS_LIMIT = 1000; + public static final int DEFAULT_BATCH_INTERVAL_DURATION = 1000; + public static final int DEFAULT_JITTER_INTERVAL_DURATION = 0; + public static final int DEFAULT_BUFFER_LIMIT = 10000; + + private int actions = DEFAULT_BATCH_ACTIONS_LIMIT; + private int flushDuration = DEFAULT_BATCH_INTERVAL_DURATION; + private int jitterDuration = DEFAULT_JITTER_INTERVAL_DURATION; + private int bufferLimit = DEFAULT_BUFFER_LIMIT; + + private ThreadFactory threadFactory = Executors.defaultThreadFactory(); + BiConsumer, Throwable> exceptionHandler = (points, throwable) -> { + }; + private InfluxDB.ConsistencyLevel consistency = InfluxDB.ConsistencyLevel.ONE; + + private BatchOptions() { + } /** * @param actions the number of actions to collect * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions actions(final int actions); + public BatchOptions actions(final int actions) { + BatchOptions clone = getClone(); + clone.actions = actions; + return clone; + } /** * @param flushDuration the time to wait at most (milliseconds). * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions flushDuration(final int flushDuration); + public BatchOptions flushDuration(final int flushDuration) { + BatchOptions clone = getClone(); + clone.flushDuration = flushDuration; + return clone; + } /** * Jitters the batch flush interval by a random amount. This is primarily to avoid @@ -34,7 +66,11 @@ public interface BatchOptions { * @param jitterDuration (milliseconds) * @return the BatchOptions instance to be able to use it in a fluent manner. 
*/ - BatchOptions jitterDuration(final int jitterDuration); + public BatchOptions jitterDuration(final int jitterDuration) { + BatchOptions clone = getClone(); + clone.jitterDuration = jitterDuration; + return clone; + } /** * The client maintains a buffer for failed writes so that the writes will be retried later on. This may @@ -46,62 +82,99 @@ public interface BatchOptions { * @param bufferLimit maximum number of points stored in the retry buffer * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions bufferLimit(final int bufferLimit); + public BatchOptions bufferLimit(final int bufferLimit) { + BatchOptions clone = getClone(); + clone.bufferLimit = bufferLimit; + return clone; + } /** * @param threadFactory a ThreadFactory instance to be used * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions threadFactory(final ThreadFactory threadFactory); + public BatchOptions threadFactory(final ThreadFactory threadFactory) { + BatchOptions clone = getClone(); + clone.threadFactory = threadFactory; + return clone; + } /** * @param exceptionHandler a consumer function to handle asynchronous errors * @return the BatchOptions instance to be able to use it in a fluent manner. */ - BatchOptions exceptionHandler(final BiConsumer, Throwable> exceptionHandler); + public BatchOptions exceptionHandler(final BiConsumer, Throwable> exceptionHandler) { + BatchOptions clone = getClone(); + clone.exceptionHandler = exceptionHandler; + return clone; + } /** * @param consistency cluster consistency setting (how many nodes have to store data points - * to treat a write as a success) + * to treat a write as a success) * @return the BatchOptions instance to be able to use it in a fluent manner. 
*/ - BatchOptions consistency(final InfluxDB.ConsistencyLevel consistency); - + public BatchOptions consistency(final InfluxDB.ConsistencyLevel consistency) { + BatchOptions clone = getClone(); + clone.consistency = consistency; + return clone; + } /** * @return actions the number of actions to collect */ - int getActions(); + public int getActions() { + return actions; + } /** * @return flushDuration the time to wait at most (milliseconds). */ - int getFlushDuration(); + public int getFlushDuration() { + return flushDuration; + } /** * @return batch flush interval jitter value (milliseconds) */ - int getJitterDuration(); + public int getJitterDuration() { + return jitterDuration; + } /** * @return Maximum number of points stored in the retry buffer, see {@link BatchOptions#bufferLimit(int)} */ - int getBufferLimit(); + public int getBufferLimit() { + return bufferLimit; + } /** * @return a ThreadFactory instance to be used */ - ThreadFactory getThreadFactory(); + public ThreadFactory getThreadFactory() { + return threadFactory; + } /** * @return a consumer function to handle asynchronous errors */ - BiConsumer, Throwable> getExceptionHandler(); + public BiConsumer, Throwable> getExceptionHandler() { + return exceptionHandler; + } /** * @return cluster consistency setting (how many nodes have to store data points * to treat a write as a success) */ - InfluxDB.ConsistencyLevel getConsistency(); + public InfluxDB.ConsistencyLevel getConsistency() { + return consistency; + } + + private BatchOptions getClone() { + try { + return (BatchOptions) this.clone(); + } catch (CloneNotSupportedException e) { + throw new RuntimeException(e); + } + } } diff --git a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java b/src/main/java/org/influxdb/impl/BatchOptionsImpl.java deleted file mode 100644 index 21cc2e757..000000000 --- a/src/main/java/org/influxdb/impl/BatchOptionsImpl.java +++ /dev/null @@ -1,112 +0,0 @@ -package org.influxdb.impl; - -import org.influxdb.BatchOptions; 
-import org.influxdb.InfluxDB; -import org.influxdb.dto.Point; - -import java.util.concurrent.Executors; -import java.util.concurrent.ThreadFactory; -import java.util.function.BiConsumer; - -public final class BatchOptionsImpl implements BatchOptions, Cloneable { - - public static final BatchOptions DEFAULTS = new BatchOptionsImpl(); - - // default values here are consistent with Telegraf - public static final int DEFAULT_BATCH_ACTIONS_LIMIT = 1000; - public static final int DEFAULT_BATCH_INTERVAL_DURATION = 1000; - public static final int DEFAULT_JITTER_INTERVAL_DURATION = 0; - public static final int DEFAULT_BUFFER_LIMIT = 10000; - - private int actions = DEFAULT_BATCH_ACTIONS_LIMIT; - private int flushDuration = DEFAULT_BATCH_INTERVAL_DURATION; - private int jitterDuration = DEFAULT_JITTER_INTERVAL_DURATION; - private int bufferLimit = DEFAULT_BUFFER_LIMIT; - - ThreadFactory threadFactory = Executors.defaultThreadFactory(); - BiConsumer, Throwable> exceptionHandler = (points, throwable) -> { - }; - InfluxDB.ConsistencyLevel consistency = InfluxDB.ConsistencyLevel.ONE; - - private BatchOptionsImpl() { - } - - public BatchOptions actions(final int actions) { - BatchOptionsImpl clone = getClone(); - clone.actions = actions; - return clone; - } - - public BatchOptions flushDuration(final int flushDuration) { - BatchOptionsImpl clone = getClone(); - clone.flushDuration = flushDuration; - return clone; - } - - public BatchOptions jitterDuration(final int jitterDuration) { - BatchOptionsImpl clone = getClone(); - clone.jitterDuration = jitterDuration; - return clone; - } - - public BatchOptions bufferLimit(final int bufferLimit) { - BatchOptionsImpl clone = getClone(); - clone.bufferLimit = bufferLimit; - return clone; - } - - public BatchOptions threadFactory(final ThreadFactory threadFactory) { - BatchOptionsImpl clone = getClone(); - clone.threadFactory = threadFactory; - return clone; - } - - public BatchOptions exceptionHandler(final BiConsumer, Throwable> 
exceptionHandler) { - BatchOptionsImpl clone = getClone(); - clone.exceptionHandler = exceptionHandler; - return clone; - } - - public BatchOptions consistency(final InfluxDB.ConsistencyLevel consistency) { - BatchOptionsImpl clone = getClone(); - clone.consistency = consistency; - return clone; - } - - private BatchOptionsImpl getClone() { - try { - return (BatchOptionsImpl) this.clone(); - } catch (CloneNotSupportedException e) { - throw new RuntimeException(e); - } - } - - public int getActions() { - return actions; - } - - public int getFlushDuration() { - return flushDuration; - } - - public int getJitterDuration() { - return jitterDuration; - } - - public InfluxDB.ConsistencyLevel getConsistency() { - return consistency; - } - - public ThreadFactory getThreadFactory() { - return threadFactory; - } - - public BiConsumer, Throwable> getExceptionHandler() { - return exceptionHandler; - } - - @Override - public int getBufferLimit() { - return bufferLimit; - } -} From 4d25472c250a75912e0bcf8408227db0723223f1 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 23 Jan 2018 14:48:02 +0700 Subject: [PATCH 125/745] fix testHandlerOnRetryImpossible - increase waiting time --- src/test/java/org/influxdb/BatchOptionsTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 86f720bce..27aad3cf6 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -287,7 +287,7 @@ public void testHandlerOnRetryImpossible() throws InterruptedException { writeSomePoints(spy, 1); - Thread.sleep(1000); + Thread.sleep(3000); verify(mockHandler, times(1)).accept(any(), any()); QueryResult result = influxDB.query(new Query("select * from weather", dbName)); From 03ebb44b5a68ddd1444c62556cb7cd1c767d734c Mon Sep 17 00:00:00 2001 From: dubsky Date: Tue, 23 Jan 2018 10:12:53 +0100 Subject: [PATCH 126/745] code 
review comments inside --- .../java/org/influxdb/BatchOptionsTest.java | 31 ++++++++++++++----- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 27aad3cf6..84c33e66a 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -74,6 +74,8 @@ public void accept(Iterable points, Throwable throwable) { /** * Test the implementation of {@link BatchOptions#actions(int)} }. + * TODO: try to make this run faster, set lower flush duration (eg. 100) + * TODO: you have to test that the points are not in the DB before flush duration as well */ @Test public void testActionsSetting() throws InterruptedException { @@ -104,7 +106,8 @@ public void testActionsSetting() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#flushDuration(int)} }. - * @throws InterruptedException + * @throws InterruptedException + * TODO: Remove this completely it doesn't test anything new compared to testActionSetting */ @Test public void testFlushDuration() throws InterruptedException { @@ -132,7 +135,9 @@ public void testFlushDuration() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. - * @throws InterruptedException + * @throws InterruptedException + * TODO: Make this run faster, set flush duration to 100. Set jitter interval to 500. + * TODO: Test that after 100ms the points are not in the DB yet. 
*/ @Test public void testJitterDuration() throws InterruptedException { @@ -171,7 +176,8 @@ public void testNegativeJitterDuration() { Assertions.assertThrows(IllegalArgumentException.class, () -> { BatchOptions options = BatchOptions.DEFAULTS.jitterDuration(-10); influxDB.enableBatch(options); - + // TODO: the lines below has no function, remove it + // TODO: you should use dedicated influxDB object so that other tests are not influenced influxDB.disableBatch(); options = BatchOptions.DEFAULTS.jitterDuration(0); influxDB.enableBatch(); @@ -204,6 +210,8 @@ private void doTestBufferLimit(int bufferLimit, int actions) throws InterruptedE /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. + * TODO: Name the test functions as it is clear what is it testing + * TODO: Remove this test until the test scenario is clear */ @Test public void testBufferLimit1() throws InterruptedException { @@ -214,6 +222,8 @@ public void testBufferLimit1() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. + * TODO: Name the test functions as it is clear what is it testing + * TODO: Remove this test until the test scenario is clear */ @Test public void testBufferLimit2() throws InterruptedException { @@ -230,7 +240,8 @@ public void testNegativeBufferLimit() { Assertions.assertThrows(IllegalArgumentException.class, () -> { BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(-10); influxDB.enableBatch(options); - + // TODO: the lines below has no function, remove it + // TODO: you should use dedicated influxDB object so that other tests are not influenced influxDB.disableBatch(); options = BatchOptions.DEFAULTS.bufferLimit(0); influxDB.enableBatch(); @@ -240,7 +251,9 @@ public void testNegativeBufferLimit() { /** * Test the implementation of {@link BatchOptions#threadFactory(ThreadFactory)} }. 
- * @throws InterruptedException + * @throws InterruptedException + * TODO: there is no assertion testing that the thread factory is used + * TODO: make this test run faster eg. 100ms */ @Test public void testThreadFactory() throws InterruptedException { @@ -268,7 +281,8 @@ public void testThreadFactory() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. - * @throws InterruptedException + * @throws InterruptedException + * TODO: make this test run faster eg. 100ms */ @Test public void testHandlerOnRetryImpossible() throws InterruptedException { @@ -302,7 +316,8 @@ public void testHandlerOnRetryImpossible() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. - * @throws InterruptedException + * @throws InterruptedException + * TODO: make this test run faster eg. 100ms */ @Test public void testHandlerOnRetryPossible() throws InterruptedException { @@ -351,6 +366,8 @@ public Object answer(InvocationOnMock invocation) throws Throwable { /** * Test the implementation of {@link BatchOptions#consistency(InfluxDB.ConsistencyLevel)} }. * @throws InterruptedException + * TODO: there is no assertion testing that the consistency value set is propagated to InfluxDB + * TODO: make this test run faster eg. 100ms */ @Test public void testConsistency() throws InterruptedException { From 2e144604f992f40571bf7a8a239c09c46bb1468b Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 24 Jan 2018 13:03:06 +0100 Subject: [PATCH 127/745] Documentation for enableBatch(BatchOptions). Changed the documentation flow so that batching is the default/recommended setup. 
--- README.md | 139 ++++++++++++++++++++++++++++++++---------------------- 1 file changed, 83 insertions(+), 56 deletions(-) diff --git a/README.md b/README.md index df5069b83..40688258f 100644 --- a/README.md +++ b/README.md @@ -12,48 +12,68 @@ To connect to InfluxDB 0.8.x you need to use influxdb-java version 1.6. This implementation is meant as a Java rewrite of the influxdb-go package. All low level REST Api calls are available. -## Usages +## Usage -### Basic Usages: +### Basic Usage: + +This is a recommended approach to write data points into InfluxDB. The influxdb-java +client is storing your writes into an internal buffer and flushes them asynchronously +to InfluxDB at a fixed flush interval to achieve good performance on both client and +server side. This requires influxdb-java v2.7 or newer. + +If you want to write data points immediately into InfluxDB and synchronously process +resulting errors see [this section.](#synchronous-writes) ```java InfluxDB influxDB = InfluxDBFactory.connect("http://172.17.0.2:8086", "root", "root"); String dbName = "aTimeSeries"; influxDB.createDatabase(dbName); +influxDB.setDatabase(dbName); String rpName = "aRetentionPolicy"; influxDB.createRetentionPolicy(rpName, dbName, "30d", "30m", 2, true); +influxDB.setRetentionPolicy(rpName); + +influxDB.enableBatch(BatchOptions.DEFAULTS); + +influxDB.write(Point.measurement("cpu") + .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) + .addField("idle", 90L) + .addField("user", 9L) + .addField("system", 1L) + .build()); + +influxDB.write(Point.measurement("disk") + .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) + .addField("used", 80L) + .addField("free", 1L) + .build()); -BatchPoints batchPoints = BatchPoints - .database(dbName) - .tag("async", "true") - .retentionPolicy(rpName) - .consistency(ConsistencyLevel.ALL) - .build(); -Point point1 = Point.measurement("cpu") - .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) - .addField("idle", 90L) - 
.addField("user", 9L) - .addField("system", 1L) - .build(); -Point point2 = Point.measurement("disk") - .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) - .addField("used", 80L) - .addField("free", 1L) - .build(); -batchPoints.point(point1); -batchPoints.point(point2); -influxDB.write(batchPoints); Query query = new Query("SELECT idle FROM cpu", dbName); influxDB.query(query); influxDB.dropRetentionPolicy(rpName, dbName); influxDB.deleteDatabase(dbName); +influxDB.close(); ``` + + +Any errors that happen during the batch flush won't leak into the caller of the `write` method. By default, any kind of errors will be just logged with "SEVERE" level. +If you need to be notified and do some custom logic when such asynchronous errors happen, you can add an error handler with a `BiConsumer, Throwable>` using the overloaded `enableBatch` method: + +```java +influxDB.enableBatch(BatchOptions.DEFAULTS.exceptionHandler( + (failedPoints, throwable) -> { /* custom error handling here */ }) +); +``` + Note: +* Batching functionality creates an internal thread pool that needs to be shutdown explicitly as part of a graceful application shut-down, or the application will not shut down properly. To do so simply call: ```influxDB.close()``` +* `InfluxDB.enableBatch(BatchOptions)` is available since version 2.9. Prior versions use `InfluxDB.enableBatch(actions, flushInterval, timeUnit)` or similar based on the configuration parameters you want to set. 
* APIs to create and drop retention policies are supported only in versions > 2.7 * If you are using influxdb < 2.8, you should use retention policy: 'autogen' * If you are using influxdb < 1.0.0, you should use 'default' instead of 'autogen' -If your application produces only single Points, you can enable the batching functionality of influxdb-java: + +If your points are written into different databases and retention policies, the more complex InfluxDB.write() methods can be used: ```java InfluxDB influxDB = InfluxDBFactory.connect("http://172.17.0.2:8086", "root", "root"); @@ -63,7 +83,7 @@ String rpName = "aRetentionPolicy"; influxDB.createRetentionPolicy(rpName, dbName, "30d", "30m", 2, true); // Flush every 2000 Points, at least every 100ms -influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS); +influxDB.enableBatch(BatchOptions.DEFAULTS.actions(2000).flushDuration(100)); Point point1 = Point.measurement("cpu") .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) @@ -83,53 +103,47 @@ Query query = new Query("SELECT idle FROM cpu", dbName); influxDB.query(query); influxDB.dropRetentionPolicy(rpName, dbName); influxDB.deleteDatabase(dbName); +influxDB.close(); ``` -Note that the batching functionality creates an internal thread pool that needs to be shutdown explicitly as part of a graceful application shut-down, or the application will not shut down properly. To do so simply call: ```influxDB.close()``` -If all of your points are written to the same database and retention policy, the simpler write() methods can be used. -This requires influxdb-java v2.7 or newer. 
+#### Synchronous writes + +If you want to write the data points immediately to InfluxDB (and handle the errors as well) without any delays see the following example: ```java InfluxDB influxDB = InfluxDBFactory.connect("http://172.17.0.2:8086", "root", "root"); String dbName = "aTimeSeries"; influxDB.createDatabase(dbName); -influxDB.setDatabase(dbName); String rpName = "aRetentionPolicy"; influxDB.createRetentionPolicy(rpName, dbName, "30d", "30m", 2, true); -influxDB.setRetentionPolicy(rpName); - -// Flush every 2000 Points, at least every 100ms -influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS); - -influxDB.write(Point.measurement("cpu") - .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) - .addField("idle", 90L) - .addField("user", 9L) - .addField("system", 1L) - .build()); - -influxDB.write(Point.measurement("disk") - .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) - .addField("used", 80L) - .addField("free", 1L) - .build()); +BatchPoints batchPoints = BatchPoints + .database(dbName) + .tag("async", "true") + .retentionPolicy(rpName) + .consistency(ConsistencyLevel.ALL) + .build(); +Point point1 = Point.measurement("cpu") + .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) + .addField("idle", 90L) + .addField("user", 9L) + .addField("system", 1L) + .build(); +Point point2 = Point.measurement("disk") + .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) + .addField("used", 80L) + .addField("free", 1L) + .build(); +batchPoints.point(point1); +batchPoints.point(point2); +influxDB.write(batchPoints); Query query = new Query("SELECT idle FROM cpu", dbName); influxDB.query(query); influxDB.dropRetentionPolicy(rpName, dbName); influxDB.deleteDatabase(dbName); ``` -Also note that any errors that happen during the batch flush won't leak into the caller of the `write` method. By default, any kind of errors will be just logged with "SEVERE" level. 
- -If you need to be notified and do some custom logic when such asynchronous errors happen, you can add an error handler with a `BiConsumer, Throwable>` using the overloaded `enableBatch` method: - -```java -// Flush every 2000 Points, at least every 100ms -influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS, Executors.defaultThreadFactory(), (failedPoints, throwable) -> { /* custom error handling here */ }); -``` - -### Advanced Usages: +### Advanced Usage: #### Gzip's support (version 2.5+ required): @@ -157,7 +171,6 @@ Query query = new Query("SELECT idle FROM cpu", dbName); influxDB.query(query, 20, queryResult -> System.out.println(queryResult)); ``` - #### QueryResult mapper to POJO (version 2.7+ required): An alternative way to handle the QueryResult object is now available. @@ -240,6 +253,20 @@ this.influxDB.query(new Query("SELECT idle FROM cpu", dbName), queryResult -> { }); ``` +#### Batch flush interval jittering (version 2.9+ required) + +When using large number of influxdb-java clients against a single server it may happen that all the clients +will submit their buffered points at the same time and possibly overloading the server. This is usually happening +when all the clients are started at once - for instance as members of cloud hosted large cluster networks. +If all the clients have the same flushDuration set this situation will repeat periodically. 
+ +To solve this situation the influxdb-java offers an option to offset the flushDuration by a random interval so that +the clients will flush their buffers in different intervals: + +```java +influxDB.enableBatch(BatchOptions.DEFAULTS.jitterDuration(500); +``` + ### Other Usages: For additional usage examples have a look at [InfluxDBTest.java](https://github.com/influxdb/influxdb-java/blob/master/src/test/java/org/influxdb/InfluxDBTest.java "InfluxDBTest.java") From 5b6d2465d3a947e0f7cbc87ea5b07d14893685f7 Mon Sep 17 00:00:00 2001 From: dubsky Date: Tue, 30 Jan 2018 16:45:21 +0100 Subject: [PATCH 128/745] renaming BatchOptions.setConsistency to BatchOptions.consistency --- src/main/java/org/influxdb/BatchOptions.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/influxdb/BatchOptions.java b/src/main/java/org/influxdb/BatchOptions.java index e9e8c87fc..316dccd00 100644 --- a/src/main/java/org/influxdb/BatchOptions.java +++ b/src/main/java/org/influxdb/BatchOptions.java @@ -95,7 +95,7 @@ public BatchOptions exceptionHandler(final BiConsumer, Throwable * to treat a write as a success) * @return the BatchOptions instance to be able to use it in a fluent manner. 
*/ - public BatchOptions setConsistency(final InfluxDB.ConsistencyLevel consistency) { + public BatchOptions consistency(final InfluxDB.ConsistencyLevel consistency) { BatchOptions clone = getClone(); clone.consistency = consistency; return clone; From 028fafff0fd331deef7a6144df0b7a5d9ee05311 Mon Sep 17 00:00:00 2001 From: dubsky Date: Tue, 30 Jan 2018 16:46:23 +0100 Subject: [PATCH 129/745] Adding test for parameters setting --- .../java/org/influxdb/BatchOptionsTest.java | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index f58a685af..7abd67532 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -10,8 +10,10 @@ import org.junit.runner.RunWith; import java.io.IOException; +import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; @RunWith(JUnitPlatform.class) public class BatchOptionsTest { @@ -23,6 +25,29 @@ public void setUp() throws InterruptedException, IOException { this.influxDB = TestUtils.connectToInfluxDB(); } + @Test + public void testParametersSet() { + BatchOptions options = BatchOptions.DEFAULTS.actions(3); + Assertions.assertEquals(3, options.getActions()); + options=options.consistency(InfluxDB.ConsistencyLevel.ANY); + Assertions.assertEquals(InfluxDB.ConsistencyLevel.ANY, options.getConsistency()); + options=options.flushDuration(1001); + Assertions.assertEquals(1001, options.getFlushDuration()); + options=options.jitterDuration(104); + Assertions.assertEquals(104, options.getJitterDuration()); + BiConsumer, Throwable> handler=new BiConsumer, Throwable>() { + @Override + public void accept(Iterable points, Throwable throwable) { + + } + }; + options=options.exceptionHandler(handler); + Assertions.assertEquals(handler, options.getExceptionHandler()); + 
ThreadFactory tf= Executors.defaultThreadFactory(); + options=options.threadFactory(tf); + Assertions.assertEquals(tf, options.getThreadFactory()); + } + /** * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}. */ From 7478124167b5676104aa647e3781f82e35711dc4 Mon Sep 17 00:00:00 2001 From: dubsky Date: Tue, 30 Jan 2018 17:42:27 +0100 Subject: [PATCH 130/745] prevent lost writes when client closes before flush interval --- src/main/java/org/influxdb/impl/BatchProcessor.java | 1 + src/main/java/org/influxdb/impl/BatchWriter.java | 5 +++++ .../java/org/influxdb/impl/OneShotBatchWriter.java | 5 +++++ .../org/influxdb/impl/RetryCapableBatchWriter.java | 11 +++++++++++ 4 files changed, 22 insertions(+) diff --git a/src/main/java/org/influxdb/impl/BatchProcessor.java b/src/main/java/org/influxdb/impl/BatchProcessor.java index 3f2d7b4b1..b0f153c98 100644 --- a/src/main/java/org/influxdb/impl/BatchProcessor.java +++ b/src/main/java/org/influxdb/impl/BatchProcessor.java @@ -362,6 +362,7 @@ public void run() { void flushAndShutdown() { this.write(); this.scheduler.shutdown(); + this.batchWriter.close(); } /** diff --git a/src/main/java/org/influxdb/impl/BatchWriter.java b/src/main/java/org/influxdb/impl/BatchWriter.java index 4763010f9..2a71ebddd 100644 --- a/src/main/java/org/influxdb/impl/BatchWriter.java +++ b/src/main/java/org/influxdb/impl/BatchWriter.java @@ -13,5 +13,10 @@ interface BatchWriter { * @param batchPointsCollection to write */ void write(Collection batchPointsCollection); + + /** + * FLush all cached writes into InfluxDB. The application is about to exit. 
+ */ + void close(); } diff --git a/src/main/java/org/influxdb/impl/OneShotBatchWriter.java b/src/main/java/org/influxdb/impl/OneShotBatchWriter.java index e981fe627..96754f144 100644 --- a/src/main/java/org/influxdb/impl/OneShotBatchWriter.java +++ b/src/main/java/org/influxdb/impl/OneShotBatchWriter.java @@ -22,4 +22,9 @@ public void write(final Collection batchPointsCollection) { influxDB.write(batchPoints); } } + + @Override + public void close() { + + } } diff --git a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java index db4e66d4c..e5ec0fe42 100644 --- a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java +++ b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java @@ -107,6 +107,17 @@ public void write(final Collection collection) { } } + @Override + public void close() { + // try to write everything queued / buffered + for (BatchPoints points : batchQueue) { + WriteResult result = tryToWrite(points); + if (result.outcome != WriteResultOutcome.WRITTEN) { + exceptionHandler.accept(points.getPoints(), result.throwable); + } + } + } + private WriteResult tryToWrite(final BatchPoints batchPoints) { try { influxDB.write(batchPoints); From 042b3370de691e3575c8dea81b962df667fa023e Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 31 Jan 2018 12:44:17 +0100 Subject: [PATCH 131/745] removing dependency on org.json, using moshi, fixing synchronization issue with RetryCapableBatchWriter, added exception for authorization problems --- pom.xml | 5 -- .../java/org/influxdb/InfluxDBException.java | 73 +++++++++++++++---- .../java/org/influxdb/impl/InfluxDBImpl.java | 15 +--- .../impl/RetryCapableBatchWriter.java | 8 +- .../impl/RetryCapableBatchWriterTest.java | 4 +- 5 files changed, 70 insertions(+), 35 deletions(-) diff --git a/pom.xml b/pom.xml index 6510af401..bd92d18d7 100644 --- a/pom.xml +++ b/pom.xml @@ -261,10 +261,5 @@ logging-interceptor 3.9.1 - - org.json - json - 20171018 
- diff --git a/src/main/java/org/influxdb/InfluxDBException.java b/src/main/java/org/influxdb/InfluxDBException.java index 5444bb354..2db463d04 100644 --- a/src/main/java/org/influxdb/InfluxDBException.java +++ b/src/main/java/org/influxdb/InfluxDBException.java @@ -1,5 +1,8 @@ package org.influxdb; +import com.squareup.moshi.JsonAdapter; +import com.squareup.moshi.Moshi; + /** * A wrapper for various exceptions caused while interacting with InfluxDB. * @@ -26,12 +29,22 @@ public boolean isRetryWorth() { return true; } + /* See https://github.com/influxdata/influxdb/blob/master/tsdb/shard.go */ static final String FIELD_TYPE_CONFLICT_ERROR = "field type conflict"; + /* See https://github.com/influxdata/influxdb/blob/master/coordinator/points_writer.go */ static final String POINTS_BEYOND_RETENTION_POLICY_ERROR = "points beyond retention policy"; + /* See https://github.com/influxdata/influxdb/blob/master/models/points.go */ static final String UNABLE_TO_PARSE_ERROR = "unable to parse"; + /* See https://github.com/influxdata/telegraf/blob/master/plugins/outputs/influxdb/influxdb.go */ static final String HINTED_HAND_OFF_QUEUE_NOT_EMPTY_ERROR = "hinted handoff queue not empty"; - static final String DATABASE_NOT_FOUND_ERROR = "database not found"; + /* See https://github.com/influxdata/influxdb/blob/master/tsdb/engine/tsm1/cache.go */ static final String CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR = "cache-max-memory-size exceeded"; + /* For all messages below see https://github.com/influxdata/influxdb/blob/master/services/httpd/handler.go */ + static final String DATABASE_NOT_FOUND_ERROR = "database not found"; + static final String USER_REQUIRED_ERROR = "user is required to write to database"; + static final String USER_NOT_AUTHORIZED_ERROR = "user is not authorized to write to database"; + static final String AUTHORIZATION_FAILED_ERROR = "authorization failed"; + static final String USERNAME_REQUIRED_ERROR = "username required"; public static final class 
DatabaseNotFoundError extends InfluxDBException { private DatabaseNotFoundError(final String message) { @@ -103,26 +116,56 @@ public boolean isRetryWorth() { } } - public static InfluxDBException buildExceptionForErrorState(final String error) { - if (error.contains(DATABASE_NOT_FOUND_ERROR)) { - return new DatabaseNotFoundError(error); + public static final class AuthorizationFailedException extends InfluxDBException { + public AuthorizationFailedException(final String message) { + super(message); + } + + public boolean isRetryWorth() { + return false; + } + } + + private static InfluxDBException buildExceptionFromErrorMessage(final String errorMessage) { + if (errorMessage.contains(DATABASE_NOT_FOUND_ERROR)) { + return new DatabaseNotFoundError(errorMessage); + } + if (errorMessage.contains(POINTS_BEYOND_RETENTION_POLICY_ERROR)) { + return new PointsBeyondRetentionPolicyException(errorMessage); } - if (error.contains(POINTS_BEYOND_RETENTION_POLICY_ERROR)) { - return new PointsBeyondRetentionPolicyException(error); + if (errorMessage.contains(FIELD_TYPE_CONFLICT_ERROR)) { + return new FieldTypeConflictException(errorMessage); } - if (error.contains(FIELD_TYPE_CONFLICT_ERROR)) { - return new FieldTypeConflictException(error); + if (errorMessage.contains(UNABLE_TO_PARSE_ERROR)) { + return new UnableToParseException(errorMessage); } - if (error.contains(UNABLE_TO_PARSE_ERROR)) { - return new UnableToParseException(error); + if (errorMessage.contains(HINTED_HAND_OFF_QUEUE_NOT_EMPTY_ERROR)) { + return new HintedHandOffQueueNotEmptyException(errorMessage); } - if (error.contains(HINTED_HAND_OFF_QUEUE_NOT_EMPTY_ERROR)) { - return new HintedHandOffQueueNotEmptyException(error); + if (errorMessage.contains(CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)) { + return new CacheMaxMemorySizeExceededException(errorMessage); } - if (error.contains(CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)) { - return new CacheMaxMemorySizeExceededException(error); + if 
(errorMessage.contains(USER_REQUIRED_ERROR) + || errorMessage.contains(USER_NOT_AUTHORIZED_ERROR) + || errorMessage.contains(AUTHORIZATION_FAILED_ERROR) + || errorMessage.contains(USERNAME_REQUIRED_ERROR)) { + return new AuthorizationFailedException(errorMessage); } - throw new InfluxDBException(error); + return new InfluxDBException(errorMessage); } + private static class ErrorMessage { + public String error; + } + + public static InfluxDBException buildExceptionForErrorState(final String errorBody) { + try { + Moshi moshi = new Moshi.Builder().build(); + JsonAdapter adapter = moshi.adapter(ErrorMessage.class).lenient(); + ErrorMessage errorMessage = adapter.fromJson(errorBody); + return InfluxDBException.buildExceptionFromErrorMessage(errorMessage.error); + } catch (Exception e) { + return new InfluxDBException(errorBody); + } + } } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 701f2f5c2..ee221ab4d 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -24,8 +24,6 @@ import org.influxdb.dto.QueryResult; import org.influxdb.impl.BatchProcessor.HttpBatchEntry; import org.influxdb.impl.BatchProcessor.UdpBatchEntry; -import org.json.JSONException; -import org.json.JSONObject; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; @@ -572,6 +570,9 @@ private Call callQuery(final Query query) { return call; } + static class ErrorMessage { + public String error; + } private T execute(final Call call) { try { @@ -580,15 +581,7 @@ private T execute(final Call call) { return response.body(); } try (ResponseBody errorBody = response.errorBody()) { - try { - JSONObject body = new JSONObject(errorBody.string()); - Object error = body.getString("error"); - if (error != null && error instanceof String) { - throw InfluxDBException.buildExceptionForErrorState((String) error); - } - } catch (JSONException e) { - } - 
throw new InfluxDBException(errorBody.string()); + throw InfluxDBException.buildExceptionForErrorState(errorBody.string()); } } catch (IOException e) { throw new InfluxDBIOException(e); diff --git a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java index e5ec0fe42..141b215de 100644 --- a/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java +++ b/src/main/java/org/influxdb/impl/RetryCapableBatchWriter.java @@ -62,8 +62,10 @@ private WriteResult(final InfluxDBException e) { } } + /* This method is synchronized to avoid parallel execution when the user invokes flush/close + * of the client in the middle of scheduled write execution (buffer flush / action limit overrun) */ @Override - public void write(final Collection collection) { + public synchronized void write(final Collection collection) { // empty the cached data first ListIterator batchQueueIterator = batchQueue.listIterator(); while (batchQueueIterator.hasNext()) { @@ -107,8 +109,10 @@ public void write(final Collection collection) { } } + /* This method is synchronized to avoid parallel execution when the BatchProcessor scheduler + * has been shutdown but there are jobs still being executed (using RetryCapableBatchWriter.write).*/ @Override - public void close() { + public synchronized void close() { // try to write everything queued / buffered for (BatchPoints points : batchQueue) { WriteResult result = tryToWrite(points); diff --git a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java index 4e635556e..0777fc89e 100644 --- a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java +++ b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java @@ -42,8 +42,8 @@ public void test() { BatchPoints bp3 = getBP(8); BatchPoints bp4 = getBP(100); - Exception nonRecoverable = InfluxDBException.buildExceptionForErrorState("database not found: 
cvfdgf"); - Exception recoverable = InfluxDBException.buildExceptionForErrorState("cache-max-memory-size exceeded 104/1400"); + Exception nonRecoverable = InfluxDBException.buildExceptionForErrorState("{ \"error\": \"database not found: cvfdgf\" }"); + Exception recoverable = InfluxDBException.buildExceptionForErrorState("{ \"error\": \"cache-max-memory-size exceeded 104/1400\" }"); Mockito.doThrow(nonRecoverable).when(mockInfluxDB).write(bp0); Mockito.doThrow(recoverable).when(mockInfluxDB).write(bp1); Mockito.doThrow(recoverable).when(mockInfluxDB).write(bp2); From 93fd32fea6a1639c718311d53f8d6801af096d06 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Thu, 1 Feb 2018 13:11:05 +0700 Subject: [PATCH 132/745] + fix BatchOptionsTest + add mote tests for RetryCapableBatchWriterTest --- .../java/org/influxdb/BatchOptionsTest.java | 261 ++++++++++++------ src/test/java/org/influxdb/TestAnswer.java | 27 ++ .../impl/RetryCapableBatchWriterTest.java | 90 +++++- 3 files changed, 299 insertions(+), 79 deletions(-) create mode 100644 src/test/java/org/influxdb/TestAnswer.java diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 84c33e66a..611bfa2fb 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -17,6 +17,7 @@ import static org.mockito.Mockito.*; import java.io.IOException; +import java.text.MessageFormat; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; @@ -74,14 +75,12 @@ public void accept(Iterable points, Throwable throwable) { /** * Test the implementation of {@link BatchOptions#actions(int)} }. - * TODO: try to make this run faster, set lower flush duration (eg. 
100) - * TODO: you have to test that the points are not in the DB before flush duration as well */ @Test public void testActionsSetting() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { - BatchOptions options = BatchOptions.DEFAULTS.actions(3); + BatchOptions options = BatchOptions.DEFAULTS.actions(3).flushDuration(100); this.influxDB.enableBatch(options); this.influxDB.createDatabase(dbName); @@ -94,9 +93,20 @@ public void testActionsSetting() throws InterruptedException { .addField("system", 3.0 * j).build(); this.influxDB.write(point); } - Thread.sleep(500); - QueryResult result=influxDB.query(new Query("select * from cpu", dbName)); - Assertions.assertEquals(3, result.getResults().get(0).getSeries().get(0).getValues().size()); + + //wait for at least one flush period + Thread.sleep(200); + //test at least 3 points was written + QueryResult result = influxDB.query(new Query("select * from cpu", dbName)); + int size = result.getResults().get(0).getSeries().get(0).getValues().size(); + Assertions.assertTrue(size >= 3, "there must be be at least 3 points written"); + + //wait for at least one flush period + Thread.sleep(200); + + //test all 5 points was written + result = influxDB.query(new Query("select * from cpu", dbName)); + Assertions.assertEquals(5, result.getResults().get(0).getSeries().get(0).getValues().size()); } finally { this.influxDB.disableBatch(); @@ -107,24 +117,27 @@ public void testActionsSetting() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#flushDuration(int)} }. 
* @throws InterruptedException - * TODO: Remove this completely it doesn't test anything new compared to testActionSetting */ @Test public void testFlushDuration() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { - BatchOptions options = BatchOptions.DEFAULTS.flushDuration(500); + BatchOptions options = BatchOptions.DEFAULTS.flushDuration(200); influxDB.createDatabase(dbName); influxDB.setDatabase(dbName); influxDB.enableBatch(options); write20Points(influxDB); + //check no points writen to DB before the flush duration QueryResult result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertNull(result.getResults().get(0).getSeries()); Assertions.assertNull(result.getResults().get(0).getError()); - Thread.sleep(1000); + //wait for at least one flush + Thread.sleep(500); result = influxDB.query(new Query("select * from weather", dbName)); + + //check points written already to DB Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); } finally { @@ -136,26 +149,26 @@ public void testFlushDuration() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. * @throws InterruptedException - * TODO: Make this run faster, set flush duration to 100. Set jitter interval to 500. - * TODO: Test that after 100ms the points are not in the DB yet. 
*/ @Test public void testJitterDuration() throws InterruptedException { - String dbName = "write_unittest_" + System.currentTimeMillis(); try { - BatchOptions options = BatchOptions.DEFAULTS.flushDuration(1000).jitterDuration(125); + BatchOptions options = BatchOptions.DEFAULTS.flushDuration(100).jitterDuration(500); influxDB.createDatabase(dbName); influxDB.setDatabase(dbName); influxDB.enableBatch(options); write20Points(influxDB); + Thread.sleep(100); + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertNull(result.getResults().get(0).getSeries()); Assertions.assertNull(result.getResults().get(0).getError()); - Thread.sleep(1125); + //wait for at least one flush + Thread.sleep(500); result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); } @@ -176,60 +189,123 @@ public void testNegativeJitterDuration() { Assertions.assertThrows(IllegalArgumentException.class, () -> { BatchOptions options = BatchOptions.DEFAULTS.jitterDuration(-10); influxDB.enableBatch(options); - // TODO: the lines below has no function, remove it - // TODO: you should use dedicated influxDB object so that other tests are not influenced - influxDB.disableBatch(); - options = BatchOptions.DEFAULTS.jitterDuration(0); - influxDB.enableBatch(); influxDB.disableBatch(); }); } - - private void doTestBufferLimit(int bufferLimit, int actions) throws InterruptedException { + /** + * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. 
+ * use a bufferLimit that less than actions, then OneShotBatchWrite is used + */ + @Test + public void testBufferLimitLessThanActions() throws InterruptedException { + + TestAnswer answer = new TestAnswer() { + + InfluxDBException influxDBException = InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)); + @Override + protected void check(InvocationOnMock invocation) { + if ((Boolean) params.get("throwException")) { + throw influxDBException; + } + } + }; + + InfluxDB spy = spy(influxDB); + //the spied influxDB.write(BatchPoints) will always throw InfluxDBException + doAnswer(answer).when(spy).write(any(BatchPoints.class)); + String dbName = "write_unittest_" + System.currentTimeMillis(); try { - BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(bufferLimit).actions(actions); + answer.params.put("throwException", true); + BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); + BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(3).actions(4).flushDuration(100).exceptionHandler(mockHandler); - influxDB.createDatabase(dbName); - influxDB.setDatabase(dbName); - influxDB.enableBatch(options); - write20Points(influxDB); + spy.createDatabase(dbName); + spy.setDatabase(dbName); + spy.enableBatch(options); + write20Points(spy); - QueryResult result = influxDB.query(new Query("select * from weather", dbName)); - Thread.sleep(1000); - result = influxDB.query(new Query("select * from weather", dbName)); + Thread.sleep(300); + verify(mockHandler, atLeastOnce()).accept(any(), any()); + + QueryResult result = spy.query(new Query("select * from weather", dbName)); + //assert 0 point written because of InfluxDBException and OneShotBatchWriter did not retry + Assertions.assertNull(result.getResults().get(0).getSeries()); + Assertions.assertNull(result.getResults().get(0).getError()); + + answer.params.put("throwException", false); + write20Points(spy); + Thread.sleep(300); + result = spy.query(new 
Query("select * from weather", dbName)); + //assert all 20 points written to DB due to no exception Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); } finally { - influxDB.disableBatch(); - influxDB.deleteDatabase(dbName); + spy.disableBatch(); + spy.deleteDatabase(dbName); } - } - - - /** - * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. - * TODO: Name the test functions as it is clear what is it testing - * TODO: Remove this test until the test scenario is clear - */ - @Test - public void testBufferLimit1() throws InterruptedException { - - doTestBufferLimit(3, 4); } /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. - * TODO: Name the test functions as it is clear what is it testing - * TODO: Remove this test until the test scenario is clear + * use a bufferLimit that greater than actions, then RetryCapableBatchWriter is used */ @Test - public void testBufferLimit2() throws InterruptedException { - - doTestBufferLimit(10, 4); + public void testBufferLimitGreaterThanActions() throws InterruptedException { + TestAnswer answer = new TestAnswer() { + + int nthCall = 0; + InfluxDBException cacheMaxMemorySizeExceededException = InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)); + @Override + protected void check(InvocationOnMock invocation) { + + switch (nthCall++) { + case 0: + throw InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.DATABASE_NOT_FOUND_ERROR)); + case 1: + throw InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)); + default: + break; + } + } + }; + + InfluxDB spy = spy(influxDB); + doAnswer(answer).when(spy).write(any(BatchPoints.class)); + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); + BatchOptions options = 
BatchOptions.DEFAULTS.bufferLimit(10).actions(8).flushDuration(100).exceptionHandler(mockHandler); + + spy.createDatabase(dbName); + spy.setDatabase(dbName); + spy.enableBatch(options); + writeSomePoints(spy, "measurement1", 0, 5); + + Thread.sleep(300); + verify(mockHandler, atLeastOnce()).accept(any(), any()); + + QueryResult result = spy.query(new Query("select * from measurement1", dbName)); + //assert 0 point written because of non-retry capable DATABASE_NOT_FOUND_ERROR and RetryCapableBatchWriter did not retry + Assertions.assertNull(result.getResults().get(0).getSeries()); + Assertions.assertNull(result.getResults().get(0).getError()); + + writeSomePoints(spy, "measurement2", 0, 5); + + Thread.sleep(300); + + result = spy.query(new Query("select * from measurement2", dbName)); + //assert all 6 point written because of retry capable CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR and RetryCapableBatchWriter did retry + Assertions.assertEquals(6, result.getResults().get(0).getSeries().get(0).getValues().size()); + } + finally { + spy.disableBatch(); + spy.deleteDatabase(dbName); + } + } /** * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. @@ -240,11 +316,6 @@ public void testNegativeBufferLimit() { Assertions.assertThrows(IllegalArgumentException.class, () -> { BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(-10); influxDB.enableBatch(options); - // TODO: the lines below has no function, remove it - // TODO: you should use dedicated influxDB object so that other tests are not influenced - influxDB.disableBatch(); - options = BatchOptions.DEFAULTS.bufferLimit(0); - influxDB.enableBatch(); influxDB.disableBatch(); }); } @@ -252,24 +323,31 @@ public void testNegativeBufferLimit() { /** * Test the implementation of {@link BatchOptions#threadFactory(ThreadFactory)} }. * @throws InterruptedException - * TODO: there is no assertion testing that the thread factory is used - * TODO: make this test run faster eg. 
100ms */ @Test public void testThreadFactory() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); try { - BatchOptions options = BatchOptions.DEFAULTS.threadFactory((r) -> { - return new Thread(r); + ThreadFactory spy = spy(new ThreadFactory() { + + ThreadFactory threadFactory = Executors.defaultThreadFactory(); + @Override + public Thread newThread(Runnable r) { + return threadFactory.newThread(r); + } }); + BatchOptions options = BatchOptions.DEFAULTS.threadFactory(spy).flushDuration(100); influxDB.createDatabase(dbName); influxDB.setDatabase(dbName); influxDB.enableBatch(options); write20Points(influxDB); - Thread.sleep(3000); + Thread.sleep(500); + //Test the thread factory is used somewhere + verify(spy, atLeastOnce()).newThread(any()); + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); } finally { @@ -282,7 +360,6 @@ public void testThreadFactory() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. * @throws InterruptedException - * TODO: make this test run faster eg. 
100ms */ @Test public void testHandlerOnRetryImpossible() throws InterruptedException { @@ -293,7 +370,7 @@ public void testHandlerOnRetryImpossible() throws InterruptedException { try { BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); - BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler); + BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler).flushDuration(100); spy.createDatabase(dbName); spy.setDatabase(dbName); @@ -301,7 +378,7 @@ public void testHandlerOnRetryImpossible() throws InterruptedException { writeSomePoints(spy, 1); - Thread.sleep(3000); + Thread.sleep(200); verify(mockHandler, times(1)).accept(any(), any()); QueryResult result = influxDB.query(new Query("select * from weather", dbName)); @@ -317,7 +394,6 @@ public void testHandlerOnRetryImpossible() throws InterruptedException { /** * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. * @throws InterruptedException - * TODO: make this test run faster eg. 100ms */ @Test public void testHandlerOnRetryPossible() throws InterruptedException { @@ -339,7 +415,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { try { BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); - BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler); + BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler).flushDuration(100); spy.createDatabase(dbName); spy.setDatabase(dbName); @@ -347,7 +423,7 @@ public Object answer(InvocationOnMock invocation) throws Throwable { writeSomePoints(spy, 1); - Thread.sleep(5000); + Thread.sleep(500); verify(mockHandler, never()).accept(any(), any()); verify(spy, times(2)).write(any(BatchPoints.class)); @@ -366,32 +442,57 @@ public Object answer(InvocationOnMock invocation) throws Throwable { /** * Test the implementation of {@link BatchOptions#consistency(InfluxDB.ConsistencyLevel)} }. 
* @throws InterruptedException - * TODO: there is no assertion testing that the consistency value set is propagated to InfluxDB - * TODO: make this test run faster eg. 100ms */ @Test public void testConsistency() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); - influxDB.createDatabase(dbName); - influxDB.setDatabase(dbName); + + InfluxDB spy = spy(influxDB); + spy.createDatabase(dbName); + spy.setDatabase(dbName); try { - int n = 5; - for (ConsistencyLevel consistencyLevel : ConsistencyLevel.values()) { - BatchOptions options = BatchOptions.DEFAULTS.consistency(consistencyLevel); + TestAnswer answer = new TestAnswer() { + @Override + protected void check(InvocationOnMock invocation) { + BatchPoints batchPoints = (BatchPoints) invocation.getArgument(0); + Assertions.assertEquals(params.get("consistencyLevel"), batchPoints.getConsistency()); + + } + }; + doAnswer(answer).when(spy).write(any(BatchPoints.class)); - influxDB.enableBatch(options); - writeSomePoints(influxDB, n); + int n = 0; + for (final ConsistencyLevel consistencyLevel : ConsistencyLevel.values()) { + answer.params.put("consistencyLevel", consistencyLevel); + BatchOptions options = BatchOptions.DEFAULTS.consistency(consistencyLevel).flushDuration(100); + spy.enableBatch(options); + Assertions.assertEquals(options.getConsistency(), consistencyLevel); + + writeSomePoints(spy, n, n + 4); + n += 5; + Thread.sleep(300); - Thread.sleep(2000); - QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + verify(spy, atLeastOnce()).write(any(BatchPoints.class)); + QueryResult result = spy.query(new Query("select * from weather", dbName)); Assertions.assertEquals(n, result.getResults().get(0).getSeries().get(0).getValues().size()); - n += 5; - this.influxDB.disableBatch(); + + spy.disableBatch(); } } finally { - this.influxDB.deleteDatabase(dbName); + spy.deleteDatabase(dbName); + } + } + + private void writeSomePoints(InfluxDB influxDB, 
String measurement, int firstIndex, int lastIndex) { + for (int i = firstIndex; i <= lastIndex; i++) { + Point point = Point.measurement(measurement) + .time(i,TimeUnit.HOURS) + .addField("field1", (double) i) + .addField("field2", (double) (i) * 1.1) + .addField("field3", "moderate").build(); + influxDB.write(point); } } @@ -413,4 +514,8 @@ private void write20Points(InfluxDB influxDB) { private void writeSomePoints(InfluxDB influxDB, int n) { writeSomePoints(influxDB, 0, n - 1); } + + private static String createErrorBody(String errorMessage) { + return MessageFormat.format("'{' \"error\": \"{0}\" '}'", errorMessage); + } } diff --git a/src/test/java/org/influxdb/TestAnswer.java b/src/test/java/org/influxdb/TestAnswer.java new file mode 100644 index 000000000..8b0a2cd41 --- /dev/null +++ b/src/test/java/org/influxdb/TestAnswer.java @@ -0,0 +1,27 @@ +package org.influxdb; + +import java.lang.reflect.Modifier; +import java.util.HashMap; +import java.util.Map; + +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +public abstract class TestAnswer implements Answer { + + Map params = new HashMap<>(); + + protected abstract void check(InvocationOnMock invocation); + + @Override + public Object answer(InvocationOnMock invocation) throws Throwable { + check(invocation); + //call only non-abstract real method + if (Modifier.isAbstract(invocation.getMethod().getModifiers())) { + return null; + } else { + return invocation.callRealMethod(); + } + } + +} \ No newline at end of file diff --git a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java index 0777fc89e..e4b9ec020 100644 --- a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java +++ b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java @@ -2,16 +2,20 @@ import org.influxdb.InfluxDB; import org.influxdb.InfluxDBException; +import org.influxdb.TestAnswer; import 
org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; import org.junit.Assert; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; +import org.mockito.invocation.InvocationOnMock; -import java.util.Collection; +import java.text.MessageFormat; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.function.BiConsumer; @@ -86,4 +90,88 @@ public void test() { Assert.assertEquals(capturedArgument2.get(1).getPoints().size(), 100); } + + @Test + public void testAllNonRecoverableExceptions() { + + InfluxDB mockInfluxDB = mock(InfluxDBImpl.class); + BiConsumer errorHandler = mock(BiConsumer.class); + RetryCapableBatchWriter rw = new RetryCapableBatchWriter(mockInfluxDB, errorHandler, + 150, 100); + + InfluxDBException nonRecoverable1 = InfluxDBException.buildExceptionForErrorState(createErrorBody("database not found: cvfdgf")); + InfluxDBException nonRecoverable2 = InfluxDBException.buildExceptionForErrorState(createErrorBody("points beyond retention policy 'abc'")); + InfluxDBException nonRecoverable3 = InfluxDBException.buildExceptionForErrorState(createErrorBody("unable to parse 'abc'")); + InfluxDBException nonRecoverable4 = InfluxDBException.buildExceptionForErrorState(createErrorBody("hinted handoff queue not empty service='abc'")); + InfluxDBException nonRecoverable5 = InfluxDBException.buildExceptionForErrorState(createErrorBody("field type conflict 'abc'")); + InfluxDBException nonRecoverable6 = new InfluxDBException.RetryBufferOverrunException(createErrorBody("Retry BufferOverrun Exception")); + InfluxDBException nonRecoverable7 = InfluxDBException.buildExceptionForErrorState(createErrorBody("user is not authorized to write to database")); + + List exceptions = Arrays.asList(nonRecoverable1, nonRecoverable2, nonRecoverable3, + nonRecoverable4, 
nonRecoverable5, nonRecoverable6, nonRecoverable7); + int size = exceptions.size(); + doAnswer(new TestAnswer() { + int i = 0; + @Override + protected void check(InvocationOnMock invocation) { + if (i < size) { + throw exceptions.get(i++); + } + } + }).when(mockInfluxDB).write(any(BatchPoints.class)); + + BatchPoints bp = getBP(8); + for (int i = 0; i < size; i++) { + rw.write(Collections.singletonList(bp)); + } + verify(errorHandler, times(size)).accept(any(), any());; + } + + @Test + public void testClosingWriter() { + InfluxDB mockInfluxDB = mock(InfluxDB.class); + BiConsumer, Throwable> errorHandler = mock(BiConsumer.class); + + BatchPoints bp5 = getBP(5); + BatchPoints bp6 = getBP(6); + BatchPoints bp90 = getBP(90); + + doAnswer(new TestAnswer() { + int i = 0; + @Override + protected void check(InvocationOnMock invocation) { + //first 4 calls + if (i++ < 4) { + throw InfluxDBException.buildExceptionForErrorState("cache-max-memory-size exceeded 104/1400"); + } + return; + } + }).when(mockInfluxDB).write(any(BatchPoints.class)); + + RetryCapableBatchWriter rw = new RetryCapableBatchWriter(mockInfluxDB, errorHandler, + 150, 100); + + rw.write(Collections.singletonList(bp5)); + rw.write(Collections.singletonList(bp6)); + rw.write(Collections.singletonList(bp90)); + //recoverable exception -> never errorHandler + verify(errorHandler, never()).accept(any(), any()); + verify(mockInfluxDB, times(3)).write(any(BatchPoints.class)); + + rw.close(); + + ArgumentCaptor captor4Write = ArgumentCaptor.forClass(BatchPoints.class); + ArgumentCaptor> captor4Accept = ArgumentCaptor.forClass(List.class); + verify(errorHandler, times(1)).accept(captor4Accept.capture(), any()); + verify(mockInfluxDB, times(5)).write(captor4Write.capture()); + + //bp5 and bp6 were merged and writing of the merged batch points on closing should be failed + Assertions.assertEquals(11, captor4Accept.getValue().size()); + //bp90 was written because no more exception thrown + Assertions.assertEquals(90, 
captor4Write.getAllValues().get(4).getPoints().size()); + } + + private static String createErrorBody(String errorMessage) { + return MessageFormat.format("'{' \"error\": \"{0}\" '}'", errorMessage); + } } From 21e335025f13aaff951fadcaa8c20bf573997439 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Thu, 1 Feb 2018 17:28:16 +0700 Subject: [PATCH 133/745] add testRetryingKeepChronologicalOrder, test RetryCapableBatchWriterTest is aware of and guarantees the chronological order of the written batchpoints --- .../impl/RetryCapableBatchWriterTest.java | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java index e4b9ec020..189f4d6d1 100644 --- a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java +++ b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java @@ -18,6 +18,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import static org.mockito.Mockito.mock; @@ -171,6 +172,60 @@ protected void check(InvocationOnMock invocation) { Assertions.assertEquals(90, captor4Write.getAllValues().get(4).getPoints().size()); } + @Test + public void testRetryingKeepChronologicalOrder() { + + BatchPoints.Builder b = BatchPoints.database("d1"); + for (int i = 0; i < 200; i++) { + b.point(Point.measurement("x1").time(1,TimeUnit.HOURS). + addField("x", 1). + tag("t", "v1").build()).build(); + } + + BatchPoints bp1 = b.build(); + + b = BatchPoints.database("d1"); + + b.point(Point.measurement("x1").time(1,TimeUnit.HOURS). + addField("x", 2). + tag("t", "v2").build()).build(); + + for (int i = 0; i < 199; i++) { + b.point(Point.measurement("x1").time(2,TimeUnit.HOURS). + addField("x", 2). 
+ tag("t", "v2").build()).build(); + } + BatchPoints bp2 = b.build(); + + InfluxDB mockInfluxDB = mock(InfluxDB.class); + BiConsumer, Throwable> errorHandler = mock(BiConsumer.class); + RetryCapableBatchWriter rw = new RetryCapableBatchWriter(mockInfluxDB, errorHandler, + 450, 150); + doAnswer(new TestAnswer() { + int i = 0; + @Override + protected void check(InvocationOnMock invocation) { + if (i++ < 1) { + throw InfluxDBException.buildExceptionForErrorState("cache-max-memory-size exceeded 104/1400"); + } + return; + } + }).when(mockInfluxDB).write(any(BatchPoints.class)); + + rw.write(Collections.singletonList(bp1)); + rw.write(Collections.singletonList(bp2)); + + ArgumentCaptor captor4Write = ArgumentCaptor.forClass(BatchPoints.class); + verify(mockInfluxDB, times(3)).write(captor4Write.capture()); + + //bp1 written but failed because of recoverable cache-max-memory-size error + Assertions.assertEquals(bp1, captor4Write.getAllValues().get(0)); + //bp1 rewritten on writing of bp2 + Assertions.assertEquals(bp1, captor4Write.getAllValues().get(1)); + //bp2 written + Assertions.assertEquals(bp2, captor4Write.getAllValues().get(2)); + + } private static String createErrorBody(String errorMessage) { return MessageFormat.format("'{' \"error\": \"{0}\" '}'", errorMessage); } From b664dd0fd6848309920f85c71917c3e5d0b49344 Mon Sep 17 00:00:00 2001 From: dubsky Date: Thu, 1 Feb 2018 20:40:41 +0100 Subject: [PATCH 134/745] adding very basic jitter interval test --- .../java/org/influxdb/BatchOptionsTest.java | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 7abd67532..e1311dcf2 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -92,4 +92,50 @@ public void testActionsSetting() throws InterruptedException { } } + /** + * Test the implementation of {@link 
BatchOptions#jitterDuration(int)} }. + * @throws InterruptedException + */ + @Test + public void testJitterDuration() throws InterruptedException { + + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BatchOptions options = BatchOptions.DEFAULTS.flushDuration(100).jitterDuration(500); + influxDB.createDatabase(dbName); + influxDB.setDatabase(dbName); + influxDB.enableBatch(options); + write20Points(influxDB); + + Thread.sleep(100); + + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertNull(result.getResults().get(0).getSeries()); + Assertions.assertNull(result.getResults().get(0).getError()); + + //wait for at least one flush + Thread.sleep(500); + result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + } + finally { + influxDB.disableBatch(); + influxDB.deleteDatabase(dbName); + } + } + + private void writeSomePoints(InfluxDB influxDB, int firstIndex, int lastIndex) { + for (int i = firstIndex; i <= lastIndex; i++) { + Point point = Point.measurement("weather") + .time(i,TimeUnit.HOURS) + .addField("temperature", (double) i) + .addField("humidity", (double) (i) * 1.1) + .addField("uv_index", "moderate").build(); + influxDB.write(point); + } + } + + private void write20Points(InfluxDB influxDB) { + writeSomePoints(influxDB, 0, 19); + } } From a189aade5b8ade810d846fe41d74067e50b3bbbe Mon Sep 17 00:00:00 2001 From: dubsky Date: Thu, 1 Feb 2018 21:35:24 +0100 Subject: [PATCH 135/745] making the jitter duration test more durable for Travis CI --- .../java/org/influxdb/BatchOptionsTest.java | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index e1311dcf2..11c255e37 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ 
b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -108,15 +108,26 @@ public void testJitterDuration() throws InterruptedException { write20Points(influxDB); Thread.sleep(100); + System.out.println("Jitter duration test wrote points"); QueryResult result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertNull(result.getResults().get(0).getSeries()); Assertions.assertNull(result.getResults().get(0).getError()); - //wait for at least one flush + System.out.println("Waiting for jitter to expire"); Thread.sleep(500); - result = influxDB.query(new Query("select * from weather", dbName)); - Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + + //wait for at least one flush + for(int i=0;i<10;i++) { + if(i==9) throw new RuntimeException("waited for too long"); + result = influxDB.query(new Query("select * from weather", dbName)); + if(result.getResults().get(0).getSeries()!=null) { + System.out.println("Jitter duration result series "+i+"/"+result.getResults().get(0).getSeries()); + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + break; + } + Thread.sleep(100); + } } finally { influxDB.disableBatch(); From a686b891ffc674e56ef16aea22ab8aef02f3c6a7 Mon Sep 17 00:00:00 2001 From: dubsky Date: Thu, 1 Feb 2018 22:26:20 +0100 Subject: [PATCH 136/745] fixing 'write compare udp performance'; test will run longer and will not count database deletion into results --- .../java/org/influxdb/PerformanceTests.java | 65 ++++++++++--------- 1 file changed, 36 insertions(+), 29 deletions(-) diff --git a/src/test/java/org/influxdb/PerformanceTests.java b/src/test/java/org/influxdb/PerformanceTests.java index e7c363d0e..cd10298ed 100644 --- a/src/test/java/org/influxdb/PerformanceTests.java +++ b/src/test/java/org/influxdb/PerformanceTests.java @@ -106,36 +106,43 @@ public void testMaxWritePointsPerformance() { this.influxDB.deleteDatabase(dbName); } + /** + * states 
that String.join("\n", records)*/ @Test public void testWriteCompareUDPPerformanceForBatchWithSinglePoints() { - //prepare data - List lineProtocols = new ArrayList(); - for (int i = 0; i < 1000; i++) { - Point point = Point.measurement("udp_single_poit").addField("v", i).build(); - lineProtocols.add(point.lineProtocol()); - } - - String dbName = "write_compare_udp_" + System.currentTimeMillis(); - this.influxDB.createDatabase(dbName); - this.influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS); - - //write batch of 1000 single string. - long start = System.currentTimeMillis(); - this.influxDB.write(UDP_PORT, lineProtocols); - long elapsedForBatchWrite = System.currentTimeMillis() - start; - System.out.println("performance(ms):write udp with batch of 1000 string:" + elapsedForBatchWrite); - - //write 1000 single string by udp. - start = System.currentTimeMillis(); - for (String lineProtocol: lineProtocols){ - this.influxDB.write(UDP_PORT, lineProtocol); - } - this.influxDB.deleteDatabase(dbName); - - long elapsedForSingleWrite = System.currentTimeMillis() - start; - System.out.println("performance(ms):write udp with 1000 single strings:" + elapsedForSingleWrite); - - Assertions.assertTrue(elapsedForSingleWrite - elapsedForBatchWrite > 0); - } + //prepare data + List lineProtocols = new ArrayList(); + for (int i = 0; i < 2000; i++) { + Point point = Point.measurement("udp_single_poit").addField("v", i).build(); + lineProtocols.add(point.lineProtocol()); + } + + String dbName = "write_compare_udp_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + this.influxDB.enableBatch(10000, 100, TimeUnit.MILLISECONDS); + + int repetitions = 15; + long start = System.currentTimeMillis(); + for (int i = 0; i < repetitions; i++) { + //write batch of 2000 single string. 
+ this.influxDB.write(UDP_PORT, lineProtocols); + } + long elapsedForBatchWrite = System.currentTimeMillis() - start; + System.out.println("performance(ms):write udp with batch of 1000 string:" + elapsedForBatchWrite); + + // write 2000 single string by udp. + start = System.currentTimeMillis(); + for (int i = 0; i < repetitions; i++) { + for (String lineProtocol : lineProtocols) { + this.influxDB.write(UDP_PORT, lineProtocol); + } + } + + long elapsedForSingleWrite = System.currentTimeMillis() - start; + System.out.println("performance(ms):write udp with 1000 single strings:" + elapsedForSingleWrite); + + this.influxDB.deleteDatabase(dbName); + Assertions.assertTrue(elapsedForSingleWrite - elapsedForBatchWrite > 0); + } } From 6374a5beefe97aec31dd30e9d4943a0b3dad4fb7 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Wed, 7 Feb 2018 13:54:29 +0700 Subject: [PATCH 137/745] fix testJitterDuration - increase waiting time --- src/test/java/org/influxdb/BatchOptionsTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index 611bfa2fb..df11552c8 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -168,7 +168,7 @@ public void testJitterDuration() throws InterruptedException { Assertions.assertNull(result.getResults().get(0).getError()); //wait for at least one flush - Thread.sleep(500); + Thread.sleep(1000); result = influxDB.query(new Query("select * from weather", dbName)); Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); } From b703db457d50fb0717b34b3873311e732d8ad288 Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 7 Feb 2018 16:22:35 +0100 Subject: [PATCH 138/745] PR #409 changes documented --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3ef151b3c..289056baf 100644 --- 
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## 2.9 [unreleased] +### Features + +- New extensible API to configure batching properties. [PR #409] +- New configuration property 'jitter interval' to avoid multiple clients hit the server periodically at the same time. [PR #409] ## 2.8 [2017-12-06] From 513c176c3eebd3bd29bf2c9d74f7bca085f37579 Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 7 Feb 2018 17:15:15 +0100 Subject: [PATCH 139/745] updating documentation for PR#410 --- CHANGELOG.md | 2 ++ README.md | 12 +++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 289056baf..e21aeb30d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ - New extensible API to configure batching properties. [PR #409] - New configuration property 'jitter interval' to avoid multiple clients hit the server periodically at the same time. [PR #409] +- New strategy on handling errors, client performs retries writes when server gets overloaded [PR #410] +- New exceptions give the client user easier way to classify errors reported by the server. [PR #410] ## 2.8 [2017-12-06] diff --git a/README.md b/README.md index 40688258f..5e3c2e714 100644 --- a/README.md +++ b/README.md @@ -65,6 +65,16 @@ influxDB.enableBatch(BatchOptions.DEFAULTS.exceptionHandler( ); ``` +With batching enabled the client provides two strategies how to deal with errors thrown by the InfluxDB server. + + 1. 'One shot' write - on failed write request to InfluxDB server an error is reported to the client using the means mentioned above. + 2. 'Retry on error' write (used by default) - on failed write the request by the client is repeated after batchInterval elapses + (if there is a chance the write will succeed - the error was caused by overloading the server, a network error etc.) 
+ When new data points are written before the previous (failed) points are successfully written, those are queued inside the client + and wait until older data points are successfully written. + Size of this queue is limited and configured by `BatchOptions.bufferLimit` property. When the limit is reached, the oldest points + in the queue are dropped. 'Retry on error' strategy is used when individual write batch size defined by `BatchOptions.actions` is lower than `BatchOptions.bufferLimit`. + Note: * Batching functionality creates an internal thread pool that needs to be shutdown explicitly as part of a graceful application shut-down, or the application will not shut down properly. To do so simply call: ```influxDB.close()``` * `InfluxDB.enableBatch(BatchOptions)` is available since version 2.9. Prior versions use `InfluxDB.enableBatch(actions, flushInterval, timeUnit)` or similar based on the configuration parameters you want to set. @@ -72,7 +82,6 @@ Note: * If you are using influxdb < 2.8, you should use retention policy: 'autogen' * If you are using influxdb < 1.0.0, you should use 'default' instead of 'autogen' - If your points are written into different databases and retention policies, the more complex InfluxDB.write() methods can be used: ```java @@ -105,6 +114,7 @@ influxDB.dropRetentionPolicy(rpName, dbName); influxDB.deleteDatabase(dbName); influxDB.close(); ``` + #### Synchronous writes From d2ff8243eb5e5f459ae833f96750cef43bd5b087 Mon Sep 17 00:00:00 2001 From: dubsky Date: Wed, 7 Feb 2018 17:37:51 +0100 Subject: [PATCH 140/745] fixing wrong exception name --- src/main/java/org/influxdb/InfluxDBException.java | 6 +++--- src/test/java/org/influxdb/BatchOptionsTest.java | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/influxdb/InfluxDBException.java b/src/main/java/org/influxdb/InfluxDBException.java index 2db463d04..bc09396eb 100644 --- a/src/main/java/org/influxdb/InfluxDBException.java +++ 
b/src/main/java/org/influxdb/InfluxDBException.java @@ -46,8 +46,8 @@ public boolean isRetryWorth() { static final String AUTHORIZATION_FAILED_ERROR = "authorization failed"; static final String USERNAME_REQUIRED_ERROR = "username required"; - public static final class DatabaseNotFoundError extends InfluxDBException { - private DatabaseNotFoundError(final String message) { + public static final class DatabaseNotFoundException extends InfluxDBException { + private DatabaseNotFoundException(final String message) { super(message); } @@ -128,7 +128,7 @@ public boolean isRetryWorth() { private static InfluxDBException buildExceptionFromErrorMessage(final String errorMessage) { if (errorMessage.contains(DATABASE_NOT_FOUND_ERROR)) { - return new DatabaseNotFoundError(errorMessage); + return new DatabaseNotFoundException(errorMessage); } if (errorMessage.contains(POINTS_BEYOND_RETENTION_POLICY_ERROR)) { return new PointsBeyondRetentionPolicyException(errorMessage); diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index df11552c8..b7d1830a3 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -1,7 +1,7 @@ package org.influxdb; import org.influxdb.InfluxDB.ConsistencyLevel; -import org.influxdb.InfluxDBException.DatabaseNotFoundError; +import org.influxdb.InfluxDBException.DatabaseNotFoundException; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; import org.influxdb.dto.Query; @@ -366,7 +366,7 @@ public void testHandlerOnRetryImpossible() throws InterruptedException { String dbName = "write_unittest_" + System.currentTimeMillis(); InfluxDB spy = spy(influxDB); - doThrow(DatabaseNotFoundError.class).when(spy).write(any(BatchPoints.class)); + doThrow(DatabaseNotFoundException.class).when(spy).write(any(BatchPoints.class)); try { BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); From 
3562a1a658aa69a2ec3d00dd91a4d2926de02fae Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 20 Feb 2018 13:45:04 +0700 Subject: [PATCH 141/745] add test for batch writing and retry --- .../java/org/influxdb/PerformanceTests.java | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/src/test/java/org/influxdb/PerformanceTests.java b/src/test/java/org/influxdb/PerformanceTests.java index cd10298ed..1fd0a0444 100644 --- a/src/test/java/org/influxdb/PerformanceTests.java +++ b/src/test/java/org/influxdb/PerformanceTests.java @@ -3,6 +3,8 @@ import org.influxdb.InfluxDB.LogLevel; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.Point; +import org.influxdb.dto.Query; +import org.influxdb.dto.QueryResult; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; @@ -10,7 +12,11 @@ import org.junit.jupiter.api.Test; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; +import org.mockito.invocation.InvocationOnMock; +import static org.mockito.Mockito.*; + +import java.net.SocketTimeoutException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; @@ -144,5 +150,61 @@ public void testWriteCompareUDPPerformanceForBatchWithSinglePoints() { this.influxDB.deleteDatabase(dbName); Assertions.assertTrue(elapsedForSingleWrite - elapsedForBatchWrite > 0); } + + @Test + public void testRetryWritePointsInBatch() throws InterruptedException { + String dbName = "d"; + + InfluxDB spy = spy(influxDB); + TestAnswer answer = new TestAnswer() { + boolean started = false; + InfluxDBException influxDBException = new InfluxDBException(new SocketTimeoutException()); + @Override + protected void check(InvocationOnMock invocation) { + if (started || System.currentTimeMillis() >= (Long) params.get("startTime")) { + System.out.println("call real"); + started = true; + } else { + System.out.println("throw"); + throw influxDBException; + } + } 
+ }; + + answer.params.put("startTime", System.currentTimeMillis() + 80000); + doAnswer(answer).when(spy).write(any(BatchPoints.class)); + + spy.createDatabase(dbName); + BatchOptions batchOptions = BatchOptions.DEFAULTS.actions(100000).flushDuration(20000).bufferLimit(3000000).exceptionHandler((points, throwable) -> { + System.out.println("+++++++++++ exceptionHandler +++++++++++"); + System.out.println(throwable); + System.out.println("++++++++++++++++++++++++++++++++++++++++"); + }); + + //this.influxDB.enableBatch(100000, 60, TimeUnit.SECONDS); + spy.enableBatch(batchOptions); + String rp = TestUtils.defaultRetentionPolicy(spy.version()); + + for (long i = 0; i < 400000; i++) { + Point point = Point.measurement("s").time(i, TimeUnit.MILLISECONDS).addField("v", 1.0).build(); + spy.write(dbName, rp, point); + } + + System.out.println("sleep"); + Thread.sleep(120000); + try { + QueryResult result = spy.query(new Query("select count(v) from s", dbName)); + double d = Double.parseDouble(result.getResults().get(0).getSeries().get(0).getValues().get(0).get(1).toString()); + Assertions.assertEquals(400000, d); + } catch (Exception e) { + System.out.println("+++++++++++++++++count() +++++++++++++++++++++"); + System.out.println(e); + System.out.println("++++++++++++++++++++++++++++++++++++++++++++++"); + + } + + spy.disableBatch(); + spy.deleteDatabase(dbName); + } } From 7d045c183d6a1d0bf2a697d46e26054c0b96e80b Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Tue, 20 Feb 2018 10:19:10 +0100 Subject: [PATCH 142/745] Update junit from 5.0.2 -> 5.1.0, update mockito from 2.13.0 -> 2.15.0 --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index a791f661e..ce94089a2 100644 --- a/pom.xml +++ b/pom.xml @@ -212,13 +212,13 @@ org.junit.jupiter junit-jupiter-engine - 5.0.2 + 5.1.0 test org.junit.platform junit-platform-runner - 1.0.2 + 1.1.0 test @@ -236,7 +236,7 @@ org.mockito mockito-core - 2.13.0 + 2.15.0 test From 
3af477ad6b068a6d070d49a998ec5df5b15b5b1f Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Mon, 26 Feb 2018 15:01:32 +0100 Subject: [PATCH 143/745] release preparation --- pom.xml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pom.xml b/pom.xml index ce94089a2..6ace49203 100644 --- a/pom.xml +++ b/pom.xml @@ -111,6 +111,11 @@ maven-resources-plugin 3.0.2 + + org.apache.maven.plugins + maven-release-plugin + 2.5.3 + @@ -206,6 +211,10 @@ + + org.apache.maven.plugins + maven-release-plugin + From b9a795902df6314ecdbdb79adf827a442f023213 Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Mon, 26 Feb 2018 15:03:05 +0100 Subject: [PATCH 144/745] influxdb run script --- run_influxdb.sh | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100755 run_influxdb.sh diff --git a/run_influxdb.sh b/run_influxdb.sh new file mode 100755 index 000000000..2fe5206b9 --- /dev/null +++ b/run_influxdb.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +docker run \ + --detach \ + --name influxdb \ + --publish 8086:8086 \ + --publish 8089:8089/udp \ + --volume ${PWD}/influxdb.conf:/etc/influxdb/influxdb.conf \ + influxdb:latest From 9b200f03f49d06d7bec4b5e53348ce2cbc867a0e Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Tue, 27 Feb 2018 11:06:14 +0100 Subject: [PATCH 145/745] final solution for running the release --- pom.xml | 607 +++++++++++++++----------- run_influxdb.sh | 9 - src/main/resources/docker-compose.yml | 11 + 3 files changed, 355 insertions(+), 272 deletions(-) delete mode 100755 run_influxdb.sh create mode 100644 src/main/resources/docker-compose.yml diff --git a/pom.xml b/pom.xml index 6ace49203..dd0d27f4d 100644 --- a/pom.xml +++ b/pom.xml @@ -1,274 +1,355 @@ - - 4.0.0 - org.influxdb - influxdb-java - jar - 2.9-SNAPSHOT - influxdb java bindings - Java API to access the InfluxDB REST API - http://www.influxdb.org + + 4.0.0 + org.influxdb + influxdb-java + jar + 2.9-SNAPSHOT + influxdb java bindings + Java API to access the InfluxDB REST API + http://www.influxdb.org - - 
3.2.1 - + + 3.2.1 + - - - The MIT License (MIT) - http://www.opensource.org/licenses/mit-license.php - repo - - - - UTF-8 - + + + The MIT License (MIT) + http://www.opensource.org/licenses/mit-license.php + repo + + + + UTF-8 + - - scm:git:git@github.com:influxdata/influxdb-java.git - scm:git:git@github.com:influxdata/influxdb-java.git - git@github.com:influxdata/influxdb-java.git - + + scm:git:git@github.com:influxdata/influxdb-java.git + scm:git:git@github.com:influxdata/influxdb-java.git + git@github.com:influxdata/influxdb-java.git + - - - majst01 - Stefan Majer - stefan.majer@gmail.com - - - - - - org.codehaus.mojo - findbugs-maven-plugin - 3.0.5 - - true - - target/site - - - - + + + majst01 + Stefan Majer + stefan.majer@gmail.com + + + + + + org.codehaus.mojo + findbugs-maven-plugin + 3.0.5 + + true + + target/site + + + + - - - ossrh - https://oss.sonatype.org/content/repositories/snapshots - - - ossrh - https://oss.sonatype.org/service/local/staging/deploy/maven2/ - - + + + ossrh + https://oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://oss.sonatype.org/service/local/staging/deploy/maven2/ + + - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.7.0 - - 1.8 - 1.8 - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - org.apache.maven.plugins - maven-site-plugin - 3.7 - - - org.apache.maven.plugins - maven-clean-plugin - 3.0.0 - - - org.apache.maven.plugins - maven-deploy-plugin - 2.8.2 - - - org.apache.maven.plugins - maven-install-plugin - 2.5.2 - - - org.apache.maven.plugins - maven-jar-plugin - 3.0.2 - - - org.apache.maven.plugins - maven-resources-plugin - 3.0.2 - - - org.apache.maven.plugins - maven-release-plugin - 2.5.3 - - - - + + + + src/main/resources + + docker-compose.yml + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.7.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + org.apache.maven.plugins + maven-site-plugin + 3.7 + + + 
org.apache.maven.plugins + maven-clean-plugin + 3.0.0 + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + org.apache.maven.plugins + maven-install-plugin + 2.5.2 + + + org.apache.maven.plugins + maven-jar-plugin + 3.0.2 + + + org.apache.maven.plugins + maven-resources-plugin + 3.0.2 + + + org.apache.maven.plugins + maven-release-plugin + 2.5.3 + + + + - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.8 - true - - ossrh - https://oss.sonatype.org/ - true - - - - org.apache.maven.plugins - maven-source-plugin - 3.0.1 - - - attach-sources - - jar-no-fork - - - - + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + true + + ossrh + https://oss.sonatype.org/ + true + + + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + attach-sources + + jar-no-fork + + + + - - org.apache.maven.plugins - maven-javadoc-plugin - 3.0.0 - - - attach-javadocs - - jar - - - - - - - org.jacoco - jacoco-maven-plugin - 0.8.0 - - - - prepare-agent - - - - report - test - - report - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 3.0.0 - - true - checkstyle.xml - true - - - - verify - - checkstyle - - - - - - org.apache.maven.plugins - maven-release-plugin - - - - - - org.junit.jupiter - junit-jupiter-engine - 5.1.0 - test - - - org.junit.platform - junit-platform-runner - 1.1.0 - test - - - org.hamcrest - hamcrest-all - 1.3 - test - - - org.assertj - assertj-core - 3.9.0 - test - - - org.mockito - mockito-core - 2.15.0 - test - - - com.squareup.retrofit2 - retrofit - 2.3.0 - - - com.squareup.retrofit2 - converter-moshi - 2.3.0 - - - - com.squareup.okhttp3 - okhttp - 3.9.1 - - - com.squareup.okhttp3 - logging-interceptor - 3.9.1 - - + + org.apache.maven.plugins + maven-javadoc-plugin + 3.0.0 + + + attach-javadocs + + jar + + + + + + org.jacoco + jacoco-maven-plugin + 0.8.0 + + + + prepare-agent + + + + report + test + + report + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.0.0 + + true + checkstyle.xml + true + + 
+ + verify + + checkstyle + + + + + + org.apache.maven.plugins + maven-release-plugin + + + release + + + + + + + org.junit.jupiter + junit-jupiter-engine + 5.1.0 + test + + + org.junit.platform + junit-platform-runner + 1.1.0 + test + + + org.hamcrest + hamcrest-all + 1.3 + test + + + org.assertj + assertj-core + 3.9.0 + test + + + org.mockito + mockito-core + 2.15.0 + test + + + com.squareup.retrofit2 + retrofit + 2.3.0 + + + com.squareup.retrofit2 + converter-moshi + 2.3.0 + + + + com.squareup.okhttp3 + okhttp + 3.9.1 + + + com.squareup.okhttp3 + logging-interceptor + 3.9.1 + + + + + release + + influxdb:latest + + + + + maven-resources-plugin + 3.0.2 + + + copy-resources + + validate + + copy-resources + + + ${project.build.directory} + + + src/main/resources + true + + docker-compose.yml + + + + + + + + + com.dkanejs.maven.plugins + docker-compose-maven-plugin + 1.0.1 + + + up + process-test-resources + + up + + + ${project.build.directory}/docker-compose.yml + true + + + + down + post-integration-test + + down + + + ${project.build.directory}/docker-compose.yml + true + + + + + + + org.apache.maven.plugins + maven-gpg-plugin + 1.6 + + + sign-artifacts + verify + + sign + + + + + + + + diff --git a/run_influxdb.sh b/run_influxdb.sh deleted file mode 100755 index 2fe5206b9..000000000 --- a/run_influxdb.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -docker run \ - --detach \ - --name influxdb \ - --publish 8086:8086 \ - --publish 8089:8089/udp \ - --volume ${PWD}/influxdb.conf:/etc/influxdb/influxdb.conf \ - influxdb:latest diff --git a/src/main/resources/docker-compose.yml b/src/main/resources/docker-compose.yml new file mode 100644 index 000000000..91d5d5cea --- /dev/null +++ b/src/main/resources/docker-compose.yml @@ -0,0 +1,11 @@ +version: '3.1' + +services: + # Define an InfluxDB service + influxdb: + image: ${image} + volumes: + - ${project.basedir}/influxdb.conf:/etc/influxdb/influxdb.conf + ports: + - "8086:8086" + - "8089:8089/udp" \ No newline at end 
of file From 0e78b6c05cbdaeb8d1fd131761e16c53579d5c92 Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Tue, 27 Feb 2018 13:51:41 +0100 Subject: [PATCH 146/745] changing line indents --- pom.xml | 670 ++++++++++++++++++++++++++++---------------------------- 1 file changed, 334 insertions(+), 336 deletions(-) diff --git a/pom.xml b/pom.xml index dd0d27f4d..b700c0a79 100644 --- a/pom.xml +++ b/pom.xml @@ -1,355 +1,353 @@ - 4.0.0 - org.influxdb - influxdb-java - jar - 2.9-SNAPSHOT - influxdb java bindings - Java API to access the InfluxDB REST API - http://www.influxdb.org + 4.0.0 + org.influxdb + influxdb-java + jar + 2.9-SNAPSHOT + influxdb java bindings + Java API to access the InfluxDB REST API + http://www.influxdb.org - - 3.2.1 - + + 3.2.1 + - - - The MIT License (MIT) - http://www.opensource.org/licenses/mit-license.php - repo - - - - UTF-8 - + + + The MIT License (MIT) + http://www.opensource.org/licenses/mit-license.php + repo + + + + UTF-8 + - - scm:git:git@github.com:influxdata/influxdb-java.git - scm:git:git@github.com:influxdata/influxdb-java.git - git@github.com:influxdata/influxdb-java.git - + + scm:git:git@github.com:influxdata/influxdb-java.git + scm:git:git@github.com:influxdata/influxdb-java.git + git@github.com:influxdata/influxdb-java.git + - - - majst01 - Stefan Majer - stefan.majer@gmail.com - - - - - - org.codehaus.mojo - findbugs-maven-plugin - 3.0.5 - - true - - target/site - - - - + + + majst01 + Stefan Majer + stefan.majer@gmail.com + + + + + + org.codehaus.mojo + findbugs-maven-plugin + 3.0.5 + + true + + target/site + + + + - - - ossrh - https://oss.sonatype.org/content/repositories/snapshots - - - ossrh - https://oss.sonatype.org/service/local/staging/deploy/maven2/ - - + + + ossrh + https://oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://oss.sonatype.org/service/local/staging/deploy/maven2/ + + - - - - src/main/resources - - docker-compose.yml - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.7.0 - 
- 1.8 - 1.8 - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - org.apache.maven.plugins - maven-site-plugin - 3.7 - - - org.apache.maven.plugins - maven-clean-plugin - 3.0.0 - - - org.apache.maven.plugins - maven-deploy-plugin - 2.8.2 - - - org.apache.maven.plugins - maven-install-plugin - 2.5.2 - - - org.apache.maven.plugins - maven-jar-plugin - 3.0.2 - - - org.apache.maven.plugins - maven-resources-plugin - 3.0.2 - - - org.apache.maven.plugins - maven-release-plugin - 2.5.3 - - - + + + + src/main/resources + + docker-compose.yml + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.7.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + org.apache.maven.plugins + maven-site-plugin + 3.7 + + + org.apache.maven.plugins + maven-clean-plugin + 3.0.0 + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + org.apache.maven.plugins + maven-install-plugin + 2.5.2 + + + org.apache.maven.plugins + maven-jar-plugin + 3.0.2 + + + org.apache.maven.plugins + maven-resources-plugin + 3.0.2 + + + org.apache.maven.plugins + maven-release-plugin + 2.5.3 + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + true + + ossrh + https://oss.sonatype.org/ + true + + + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + attach-sources + + jar-no-fork + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.0.0 + + + attach-javadocs + + jar + + + + + + org.jacoco + jacoco-maven-plugin + 0.8.0 + + + + prepare-agent + + + + report + test + + report + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.0.0 + + true + checkstyle.xml + true + + + + verify + + checkstyle + + + + + + org.apache.maven.plugins + maven-release-plugin + + + release + + + + + + + org.junit.jupiter + junit-jupiter-engine + 5.1.0 + test + + + org.junit.platform + junit-platform-runner + 1.1.0 + test + + + org.hamcrest + hamcrest-all + 1.3 + test + + + org.assertj + assertj-core + 3.9.0 + test + + + 
org.mockito + mockito-core + 2.15.0 + test + + + com.squareup.retrofit2 + retrofit + 2.3.0 + + + com.squareup.retrofit2 + converter-moshi + 2.3.0 + + + + com.squareup.okhttp3 + okhttp + 3.9.1 + + + com.squareup.okhttp3 + logging-interceptor + 3.9.1 + + + + + release + + influxdb:latest + + - - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.8 - true + + maven-resources-plugin + 3.0.2 + + + copy-resources + + validate + + copy-resources + - ossrh - https://oss.sonatype.org/ - true + ${project.build.directory} + + + src/main/resources + true + + docker-compose.yml + + + - - - org.apache.maven.plugins - maven-source-plugin - 3.0.1 - - - attach-sources - - jar-no-fork - - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - 3.0.0 - - - attach-javadocs - - jar - - - - - - org.jacoco - jacoco-maven-plugin - 0.8.0 - - - - prepare-agent - - - - report - test - - report - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 3.0.0 + + + + + com.dkanejs.maven.plugins + docker-compose-maven-plugin + 1.0.1 + + + up + process-test-resources + + up + - true - checkstyle.xml - true + ${project.build.directory}/docker-compose.yml + true - - - verify - - checkstyle - - - - - - org.apache.maven.plugins - maven-release-plugin + + + down + post-integration-test + + down + - - release + ${project.build.directory}/docker-compose.yml + true - + + + + + + org.apache.maven.plugins + maven-gpg-plugin + 1.6 + + + sign-artifacts + verify + + sign + + + + - - - - org.junit.jupiter - junit-jupiter-engine - 5.1.0 - test - - - org.junit.platform - junit-platform-runner - 1.1.0 - test - - - org.hamcrest - hamcrest-all - 1.3 - test - - - org.assertj - assertj-core - 3.9.0 - test - - - org.mockito - mockito-core - 2.15.0 - test - - - com.squareup.retrofit2 - retrofit - 2.3.0 - - - com.squareup.retrofit2 - converter-moshi - 2.3.0 - - - - com.squareup.okhttp3 - okhttp - 3.9.1 - - - com.squareup.okhttp3 - logging-interceptor - 3.9.1 - - - - - release - - influxdb:latest - - - 
- - maven-resources-plugin - 3.0.2 - - - copy-resources - - validate - - copy-resources - - - ${project.build.directory} - - - src/main/resources - true - - docker-compose.yml - - - - - - - - - com.dkanejs.maven.plugins - docker-compose-maven-plugin - 1.0.1 - - - up - process-test-resources - - up - - - ${project.build.directory}/docker-compose.yml - true - - - - down - post-integration-test - - down - - - ${project.build.directory}/docker-compose.yml - true - - - - - - - org.apache.maven.plugins - maven-gpg-plugin - 1.6 - - - sign-artifacts - verify - - sign - - - - - - - - + + + From d744bc98def3d09c9d94b435d344d26857ddc998 Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Tue, 27 Feb 2018 14:19:21 +0100 Subject: [PATCH 147/745] [maven-release-plugin] prepare release influxdb-java-2.9 --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index b700c0a79..cabd81039 100644 --- a/pom.xml +++ b/pom.xml @@ -1,11 +1,10 @@ - + 4.0.0 org.influxdb influxdb-java jar - 2.9-SNAPSHOT + 2.9 influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -29,6 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git git@github.com:influxdata/influxdb-java.git + influxdb-java-2.9 From 84235c8c258028acdb7188dbc0ffcc493aab561f Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Tue, 27 Feb 2018 14:19:29 +0100 Subject: [PATCH 148/745] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index cabd81039..4c7809dfd 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.9 + 2.10-SNAPSHOT influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -28,7 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git 
git@github.com:influxdata/influxdb-java.git - influxdb-java-2.9 + HEAD From da1a23d742574806048c83f1b9f355c12b729668 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Tue, 27 Feb 2018 17:57:37 +0100 Subject: [PATCH 149/745] set release date --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e21aeb30d..452b04f9a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## 2.9 [unreleased] +## 2.9 [2017-02-27] ### Features From f53a7e733aee1ef65fb57e6f42263d1959b454b5 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Tue, 27 Feb 2018 17:58:50 +0100 Subject: [PATCH 150/745] Revert "set release date" This reverts commit da1a23d742574806048c83f1b9f355c12b729668. --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 452b04f9a..e21aeb30d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## 2.9 [2017-02-27] +## 2.9 [unreleased] ### Features From be3b8b62af32ef64c098fb429a901663545359a6 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Tue, 27 Feb 2018 17:59:33 +0100 Subject: [PATCH 151/745] set release date --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e21aeb30d..2cb32802d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## 2.9 [unreleased] +## 2.9 [2018-02-27] ### Features From 30589f7e8b1829529f065b7fa4027fd090b0d1f7 Mon Sep 17 00:00:00 2001 From: Iker Aguayo Ureta Date: Thu, 1 Mar 2018 13:42:27 +0100 Subject: [PATCH 152/745] #371 Support dynamic measurement name in InfluxDBResultMapper --- .../influxdb/impl/InfluxDBResultMapper.java | 58 ++++++++++++++----- 1 file changed, 45 insertions(+), 13 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java index 2188bcd18..dd3097c12 100644 --- 
a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java +++ b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java @@ -75,31 +75,63 @@ public class InfluxDBResultMapper { * @param queryResult the InfluxDB result object * @param clazz the Class that will be used to hold your measurement data * @param the target type + * * @return a {@link List} of objects from the same Class passed as parameter and sorted on the - * same order as received from InfluxDB. + * same order as received from InfluxDB. + * * @throws InfluxDBMapperException If {@link QueryResult} parameter contain errors, - * clazz parameter is not annotated with @Measurement or it was not - * possible to define the values of your POJO (e.g. due to an unsupported field type). + * clazz parameter is not annotated with @Measurement or it was not + * possible to define the values of your POJO (e.g. due to an unsupported field type). */ public List toPOJO(final QueryResult queryResult, final Class clazz) throws InfluxDBMapperException { + throwExceptionIfMissingAnnotation(clazz); + String measurementName = getMeasurementName(clazz); + return this.toPOJO(queryResult, clazz, measurementName); + } + + /** + *

+ * Process a {@link QueryResult} object returned by the InfluxDB client inspecting the internal + * data structure and creating the respective object instances based on the Class passed as + * parameter. + *

+ * + * @param queryResult the InfluxDB result object + * @param clazz the Class that will be used to hold your measurement data + * @param the target type + * @param measurementName name of the Measurement + * + * @return a {@link List} of objects from the same Class passed as parameter and sorted on the + * same order as received from InfluxDB. + * + * @throws InfluxDBMapperException If {@link QueryResult} parameter contain errors, + * clazz parameter is not annotated with @Measurement or it was not + * possible to define the values of your POJO (e.g. due to an unsupported field type). + */ + public List toPOJO(final QueryResult queryResult, final Class clazz, final String measurementName) + throws InfluxDBMapperException { Objects.requireNonNull(queryResult, "queryResult"); Objects.requireNonNull(clazz, "clazz"); - throwExceptionIfMissingAnnotation(clazz); throwExceptionIfResultWithError(queryResult); cacheMeasurementClass(clazz); List result = new LinkedList(); - String measurementName = getMeasurementName(clazz); + + if (measurementName == null || measurementName.isEmpty()) + { + throw new IllegalArgumentException("measurementName must not be null or empty"); + } + queryResult.getResults().stream() - .filter(internalResult -> Objects.nonNull(internalResult) && Objects.nonNull(internalResult.getSeries())) - .forEach(internalResult -> { - internalResult.getSeries().stream() - .filter(series -> series.getName().equals(measurementName)) - .forEachOrdered(series -> { - parseSeriesAs(series, clazz, result); - }); - }); + .filter(internalResult -> Objects.nonNull(internalResult) && Objects.nonNull(internalResult.getSeries())) + .forEach(internalResult -> { + internalResult.getSeries().stream() + .filter(series -> series.getName().equals(measurementName)) + .forEachOrdered(series -> { + parseSeriesAs(series, clazz, result); + }); + }); return result; } From a00cd83a9c0220f30d84b83f2d647e7051f068d7 Mon Sep 17 00:00:00 2001 From: Iker Aguayo Ureta Date: Fri, 2 Mar 2018 
08:55:27 +0100 Subject: [PATCH 153/745] indentation changed, added requireNonNull instead of if --- .../influxdb/impl/InfluxDBResultMapper.java | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java index dd3097c12..0c1e9caa3 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java +++ b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java @@ -110,6 +110,8 @@ public List toPOJO(final QueryResult queryResult, final Class clazz) t */ public List toPOJO(final QueryResult queryResult, final Class clazz, final String measurementName) throws InfluxDBMapperException { + + Objects.requireNonNull(measurementName, "measurementName"); Objects.requireNonNull(queryResult, "queryResult"); Objects.requireNonNull(clazz, "clazz"); @@ -118,19 +120,14 @@ public List toPOJO(final QueryResult queryResult, final Class clazz, f List result = new LinkedList(); - if (measurementName == null || measurementName.isEmpty()) - { - throw new IllegalArgumentException("measurementName must not be null or empty"); - } - queryResult.getResults().stream() - .filter(internalResult -> Objects.nonNull(internalResult) && Objects.nonNull(internalResult.getSeries())) - .forEach(internalResult -> { - internalResult.getSeries().stream() - .filter(series -> series.getName().equals(measurementName)) - .forEachOrdered(series -> { - parseSeriesAs(series, clazz, result); - }); + .filter(internalResult -> Objects.nonNull(internalResult) && Objects.nonNull(internalResult.getSeries())) + .forEach(internalResult -> { + internalResult.getSeries().stream() + .filter(series -> series.getName().equals(measurementName)) + .forEachOrdered(series -> { + parseSeriesAs(series, clazz, result); + }); }); return result; From 17b2f248a5c153772e589a3d3346998f4146d65d Mon Sep 17 00:00:00 2001 From: Michael Schaefers Date: Fri, 9 Mar 2018 15:26:45 +0100 Subject: 
[PATCH 154/745] Performance: Escape fields and keys more efficiently than using String.replace twice per value. --- src/main/java/org/influxdb/dto/Point.java | 55 ++++++++++++++++------- 1 file changed, 38 insertions(+), 17 deletions(-) diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index 8c0f91b79..ad33c4b72 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -10,7 +10,6 @@ import java.util.Objects; import java.util.TreeMap; import java.util.concurrent.TimeUnit; -import java.util.function.Function; import org.influxdb.impl.Preconditions; @@ -26,11 +25,6 @@ public class Point { private Long time; private TimeUnit precision = TimeUnit.NANOSECONDS; private Map fields; - - private static final Function FIELD_ESCAPER = s -> - s.replace("\\", "\\\\").replace("\"", "\\\""); - private static final Function KEY_ESCAPER = s -> - s.replace(" ", "\\ ").replace(",", "\\,").replace("=", "\\="); private static final int MAX_FRACTION_DIGITS = 340; private static final ThreadLocal NUMBER_FORMATTER = ThreadLocal.withInitial(() -> { @@ -182,7 +176,7 @@ public Builder fields(final Map fieldsToAdd) { /** * Add a time to this point. * - * @param timeToSet the time for this point + * @param timeToSet the time for this point * @param precisionToSet the TimeUnit * @return the Builder instance. 
*/ @@ -205,8 +199,8 @@ public Point build() { point.setFields(this.fields); point.setMeasurement(this.measurement); if (this.time != null) { - point.setTime(this.time); - point.setPrecision(this.precision); + point.setTime(this.time); + point.setPrecision(this.precision); } point.setTags(this.tags); return point; @@ -329,10 +323,10 @@ public String lineProtocol() { private void concatenatedTags(final StringBuilder sb) { for (Entry tag : this.tags.entrySet()) { - sb.append(',') - .append(KEY_ESCAPER.apply(tag.getKey())) - .append('=') - .append(KEY_ESCAPER.apply(tag.getValue())); + sb.append(','); + escapeKey(sb, tag.getKey()); + sb.append('='); + escapeKey(sb, tag.getValue()); } sb.append(' '); } @@ -343,8 +337,8 @@ private void concatenatedFields(final StringBuilder sb) { if (value == null) { continue; } - - sb.append(KEY_ESCAPER.apply(field.getKey())).append('='); + escapeKey(sb, field.getKey()); + sb.append('='); if (value instanceof Number) { if (value instanceof Double || value instanceof Float || value instanceof BigDecimal) { sb.append(NUMBER_FORMATTER.get().format(value)); @@ -353,7 +347,9 @@ private void concatenatedFields(final StringBuilder sb) { } } else if (value instanceof String) { String stringValue = (String) value; - sb.append('"').append(FIELD_ESCAPER.apply(stringValue)).append('"'); + sb.append('"'); + escapeField(sb, stringValue); + sb.append('"'); } else { sb.append(value); } @@ -368,6 +364,31 @@ private void concatenatedFields(final StringBuilder sb) { } } + static void escapeKey(StringBuilder sb, String key) { + for (int i = 0; i < key.length(); i++) { + switch (key.charAt(i)) { + case ' ': + case ',': + case '=': + sb.append('\\'); + default: + sb.append(key.charAt(i)); + } + } + } + + static void escapeField(StringBuilder sb, String field) { + for (int i = 0; i < field.length(); i++) { + switch (field.charAt(i)) { + case '\\': + case '\"': + sb.append('\\'); + default: + sb.append(field.charAt(i)); + } + } + } + private void 
formatedTime(final StringBuilder sb) { if (this.time == null || this.precision == null) { return; @@ -380,7 +401,7 @@ private static class MeasurementStringBuilder { private final int length; MeasurementStringBuilder(final String measurement) { - this.sb.append(KEY_ESCAPER.apply(measurement)); + escapeKey(this.sb, measurement); this.length = sb.length(); } From e2169b3f03cda481975cb7fb872988c4354e912e Mon Sep 17 00:00:00 2001 From: Michael Schaefers Date: Fri, 9 Mar 2018 15:36:21 +0100 Subject: [PATCH 155/745] fix travis errors --- src/main/java/org/influxdb/dto/Point.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index ad33c4b72..35f7f3a1c 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -364,7 +364,7 @@ private void concatenatedFields(final StringBuilder sb) { } } - static void escapeKey(StringBuilder sb, String key) { + static void escapeKey(final StringBuilder sb, final String key) { for (int i = 0; i < key.length(); i++) { switch (key.charAt(i)) { case ' ': @@ -377,7 +377,7 @@ static void escapeKey(StringBuilder sb, String key) { } } - static void escapeField(StringBuilder sb, String field) { + static void escapeField(final StringBuilder sb, final String field) { for (int i = 0; i < field.length(); i++) { switch (field.charAt(i)) { case '\\': From 85707c56f23fa1eef00cf211fb926f7e469cb5f1 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 10 Mar 2018 12:39:07 +0100 Subject: [PATCH 156/745] run tests against influxdb 1.5 as well --- compile-and-test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compile-and-test.sh b/compile-and-test.sh index 67323c40f..8b10906d0 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -4,7 +4,7 @@ # set -e -INFLUXDB_VERSIONS="1.4 1.3 1.2 1.1" +INFLUXDB_VERSIONS="1.5 1.4 1.3 1.2 1.1" JAVA_VERSIONS="3-jdk-8-alpine 3-jdk-9-slim" 
From f1520ccb056b5af4483a34b2840b1c2109ec9c6a Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 10 Mar 2018 12:42:04 +0100 Subject: [PATCH 157/745] Update release informations in README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 5e3c2e714..6a41e89fa 100644 --- a/README.md +++ b/README.md @@ -287,12 +287,12 @@ The latest version for maven dependence: org.influxdb influxdb-java - 2.8 + 2.9 ``` Or when using with gradle: ```groovy -compile 'org.influxdb:influxdb-java:2.8' +compile 'org.influxdb:influxdb-java:2.9' ``` For version change history have a look at [ChangeLog](https://github.com/influxdata/influxdb-java/blob/master/CHANGELOG.md). From 6d610fcfae17a5a4394cbcd9688468be0ad2c3bd Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 10 Mar 2018 14:11:54 +0100 Subject: [PATCH 158/745] Update dependencies --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 4c7809dfd..cbea0c021 100644 --- a/pom.xml +++ b/pom.xml @@ -236,7 +236,7 @@ org.assertj assertj-core - 3.9.0 + 3.9.1 test @@ -260,12 +260,12 @@ com.squareup.okhttp3 okhttp - 3.9.1 + 3.10.0 com.squareup.okhttp3 logging-interceptor - 3.9.1 + 3.10.0
@@ -305,7 +305,7 @@ com.dkanejs.maven.plugins docker-compose-maven-plugin - 1.0.1 + 1.0.3 up From 458d4ea6156172298896cb076923da9204999160 Mon Sep 17 00:00:00 2001 From: Michael Schaefers Date: Sat, 10 Mar 2018 15:00:24 +0100 Subject: [PATCH 159/745] revert wrong formatting as requested --- src/main/java/org/influxdb/dto/Point.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index 35f7f3a1c..acac4a82d 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -176,7 +176,7 @@ public Builder fields(final Map fieldsToAdd) { /** * Add a time to this point. * - * @param timeToSet the time for this point + * @param timeToSet the time for this point * @param precisionToSet the TimeUnit * @return the Builder instance. */ @@ -199,8 +199,8 @@ public Point build() { point.setFields(this.fields); point.setMeasurement(this.measurement); if (this.time != null) { - point.setTime(this.time); - point.setPrecision(this.precision); + point.setTime(this.time); + point.setPrecision(this.precision); } point.setTags(this.tags); return point; From 829749435a2a13fc27160e9d46fd58035024bb67 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 10 Mar 2018 15:31:31 +0100 Subject: [PATCH 160/745] Performance improvement: use chained StringBuilder calls instead of single calls, closes #425 --- .../java/org/influxdb/dto/BatchPoints.java | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/main/java/org/influxdb/dto/BatchPoints.java b/src/main/java/org/influxdb/dto/BatchPoints.java index 9d29d6cd3..6bf16b92c 100644 --- a/src/main/java/org/influxdb/dto/BatchPoints.java +++ b/src/main/java/org/influxdb/dto/BatchPoints.java @@ -253,17 +253,17 @@ public int hashCode() { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("BatchPoints [database="); - 
builder.append(this.database); - builder.append(", retentionPolicy="); - builder.append(this.retentionPolicy); - builder.append(", consistency="); - builder.append(this.consistency); - builder.append(", tags="); - builder.append(this.tags); - builder.append(", points="); - builder.append(this.points); - builder.append("]"); + builder.append("BatchPoints [database=") + .append(this.database) + .append(", retentionPolicy=") + .append(this.retentionPolicy) + .append(", consistency=") + .append(this.consistency) + .append(", tags=") + .append(this.tags) + .append(", points=") + .append(this.points) + .append("]"); return builder.toString(); } From 2ea9ec646cbc935f3421de543a598d4081bdcc2c Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Thu, 15 Mar 2018 19:14:27 +0100 Subject: [PATCH 161/745] First simple implementation of "prepared statements" I added a new query class BoundParameterQuery, which can be used for "prepared statements". The constructor accepts a InfluxQL expression with placeholders, a DB name and a varags list to bind the parameters to the placeholders. I also extended the InfluxDBService, so that the HTTP requests contain the "params" parameter. The InfluxDBImpl now differentiates between a Query and a BoundParameterQuery. This is not the cleanest solution, because this has to be done at a few locations, but I didn't want to change too much of the code. 
--- .../org/influxdb/dto/BoundParameterQuery.java | 81 ++++++++++++++++ .../java/org/influxdb/impl/InfluxDBImpl.java | 43 +++++++-- .../org/influxdb/impl/InfluxDBService.java | 16 ++++ .../influxdb/dto/BoundParameterQueryTest.java | 93 +++++++++++++++++++ 4 files changed, 224 insertions(+), 9 deletions(-) create mode 100644 src/main/java/org/influxdb/dto/BoundParameterQuery.java create mode 100644 src/test/java/org/influxdb/dto/BoundParameterQueryTest.java diff --git a/src/main/java/org/influxdb/dto/BoundParameterQuery.java b/src/main/java/org/influxdb/dto/BoundParameterQuery.java new file mode 100644 index 000000000..cedf8b8a7 --- /dev/null +++ b/src/main/java/org/influxdb/dto/BoundParameterQuery.java @@ -0,0 +1,81 @@ +package org.influxdb.dto; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import com.squareup.moshi.JsonWriter; + +import okio.Buffer; + +public class BoundParameterQuery extends Query { + + private final Object[] params; + + public BoundParameterQuery(final String command, final String database, final Object...params) { + super(command, database, true); + this.params = params; + } + + public String getParameterJsonWithUrlEncoded() { + try { + List placeholders = parsePlaceHolders(getCommand()); + Map parameterMap = createParameterMap(placeholders, params); + String jsonParameterObject = createJsonObject(parameterMap); + String urlEncodedJsonParameterObject = encode(jsonParameterObject); + return urlEncodedJsonParameterObject; + } catch (IOException e) { + throw new RuntimeException("Couldn't create parameter JSON object", e); + } + } + + private String createJsonObject(final Map parameterMap) throws IOException { + Buffer b = new Buffer(); + JsonWriter writer = JsonWriter.of(b); + writer.beginObject(); + for (Entry pair : 
parameterMap.entrySet()) { + String name = pair.getKey(); + Object value = pair.getValue(); + if (value instanceof Number) { + writer.name(name).value((Number) value); + } else if (value instanceof String) { + writer.name(name).value((String) value); + } else if (value instanceof Boolean) { + writer.name(name).value((Boolean) value); + } else { + writer.name(name).value(value.toString()); + } + } + writer.endObject(); + return b.readString(Charset.forName("utf-8")); + } + + private Map createParameterMap(final List placeholders, final Object[] params) { + if (placeholders.size() != params.length) { + throw new RuntimeException("Unbalanced amount of placeholders and parameters"); + } + + Map parameterMap = new HashMap<>(); + int index = 0; + for (String placeholder : placeholders) { + parameterMap.put(placeholder, params[index++]); + } + return parameterMap; + } + + private List parsePlaceHolders(final String command) { + List placeHolders = new ArrayList<>(); + Pattern p = Pattern.compile("\\s+\\$(\\w+?)(?:\\s|$)"); + Matcher m = p.matcher(getCommand()); + while (m.find()) { + placeHolders.add(m.group(1)); + } + return placeHolders; + } +} diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index ee221ab4d..e94c2b92b 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -18,6 +18,7 @@ import org.influxdb.InfluxDBException; import org.influxdb.InfluxDBIOException; import org.influxdb.dto.BatchPoints; +import org.influxdb.dto.BoundParameterQuery; import org.influxdb.dto.Point; import org.influxdb.dto.Pong; import org.influxdb.dto.Query; @@ -454,8 +455,16 @@ public void query(final Query query, final int chunkSize, final Consumer call = this.influxDBService.query(this.username, this.password, - query.getDatabase(), query.getCommandWithUrlEncoded(), chunkSize); + Call call = null; + if (query instanceof BoundParameterQuery) { + 
BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query; + call = this.influxDBService.query(this.username, this.password, + query.getDatabase(), query.getCommandWithUrlEncoded(), chunkSize, + boundParameterQuery.getParameterJsonWithUrlEncoded()); + } else { + call = this.influxDBService.query(this.username, this.password, + query.getDatabase(), query.getCommandWithUrlEncoded(), chunkSize); + } call.enqueue(new Callback() { @Override @@ -496,8 +505,17 @@ public void onFailure(final Call call, final Throwable t) { */ @Override public QueryResult query(final Query query, final TimeUnit timeUnit) { - return execute(this.influxDBService.query(this.username, this.password, query.getDatabase(), - TimeUtil.toTimePrecision(timeUnit), query.getCommandWithUrlEncoded())); + Call call = null; + if (query instanceof BoundParameterQuery) { + BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query; + call = this.influxDBService.query(this.username, this.password, query.getDatabase(), + TimeUtil.toTimePrecision(timeUnit), query.getCommandWithUrlEncoded(), + boundParameterQuery.getParameterJsonWithUrlEncoded()); + } else { + call = this.influxDBService.query(this.username, this.password, query.getDatabase(), + TimeUtil.toTimePrecision(timeUnit), query.getCommandWithUrlEncoded()); + } + return execute(call); } /** @@ -560,12 +578,19 @@ public boolean databaseExists(final String name) { */ private Call callQuery(final Query query) { Call call; - if (query.requiresPost()) { - call = this.influxDBService.postQuery(this.username, - this.password, query.getDatabase(), query.getCommandWithUrlEncoded()); + if (query instanceof BoundParameterQuery) { + BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query; + call = this.influxDBService.postQuery(this.username, + this.password, query.getDatabase(), query.getCommandWithUrlEncoded(), + boundParameterQuery.getParameterJsonWithUrlEncoded()); } else { - call = this.influxDBService.query(this.username, - 
this.password, query.getDatabase(), query.getCommandWithUrlEncoded()); + if (query.requiresPost()) { + call = this.influxDBService.postQuery(this.username, + this.password, query.getDatabase(), query.getCommandWithUrlEncoded()); + } else { + call = this.influxDBService.query(this.username, + this.password, query.getDatabase(), query.getCommandWithUrlEncoded()); + } } return call; } diff --git a/src/main/java/org/influxdb/impl/InfluxDBService.java b/src/main/java/org/influxdb/impl/InfluxDBService.java index 6485f8654..4876b5652 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBService.java +++ b/src/main/java/org/influxdb/impl/InfluxDBService.java @@ -18,6 +18,7 @@ interface InfluxDBService { public static final String Q = "q"; public static final String DB = "db"; public static final String RP = "rp"; + public static final String PARAMS = "params"; public static final String PRECISION = "precision"; public static final String CONSISTENCY = "consistency"; public static final String EPOCH = "epoch"; @@ -47,6 +48,11 @@ public Call writePoints(@Query(U) String username, public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, @Query(EPOCH) String epoch, @Query(value = Q, encoded = true) String query); + @POST("/query") + public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, + @Query(EPOCH) String epoch, @Query(value = Q, encoded = true) String query, + @Query(value = PARAMS, encoded = true) String params); + @GET("/query") public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, @Query(value = Q, encoded = true) String query); @@ -55,6 +61,10 @@ public Call query(@Query(U) String username, @Query(P) String passw public Call postQuery(@Query(U) String username, @Query(P) String password, @Query(DB) String db, @Query(value = Q, encoded = true) String query); + @POST("/query") + public Call postQuery(@Query(U) String username, @Query(P) String password, 
@Query(DB) String db, + @Query(value = Q, encoded = true) String query, @Query(value = PARAMS, encoded = true) String params); + @GET("/query") public Call query(@Query(U) String username, @Query(P) String password, @Query(value = Q, encoded = true) String query); @@ -68,4 +78,10 @@ public Call postQuery(@Query(U) String username, public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, @Query(value = Q, encoded = true) String query, @Query(CHUNK_SIZE) int chunkSize); + + @Streaming + @POST("/query?chunked=true") + public Call query(@Query(U) String username, + @Query(P) String password, @Query(DB) String db, @Query(value = Q, encoded = true) String query, + @Query(CHUNK_SIZE) int chunkSize, @Query(value = PARAMS, encoded = true) String params); } diff --git a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java new file mode 100644 index 000000000..f9f8500b7 --- /dev/null +++ b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java @@ -0,0 +1,93 @@ +package org.influxdb.dto; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; + +import org.junit.Assert; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; + +import com.squareup.moshi.JsonReader; + +import okio.Buffer; + + +/** + * Test for the BoundParameterQuery DTO. 
+ */ +@RunWith(JUnitPlatform.class) +public class BoundParameterQueryTest { + + @Test + public void testSingleCharacterPlaceHolderParsing() throws IOException { + BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $a AND b < $b", "foobar", 0, 10); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(2, params.size()); + Assert.assertEquals(params.get("a"), 0.0); + Assert.assertEquals(params.get("b"), 10.0); + } + + @Test + public void testPlaceHolderParsing() throws IOException { + BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd", "foobar", 0, 10); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(2, params.size()); + Assert.assertEquals(params.get("abc"), 0.0); + Assert.assertEquals(params.get("bcd"), 10.0); + } + + @Test + public void testIgnoreInvalidPlaceholders() throws UnsupportedEncodingException { + BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $", "foobar"); + Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); + + query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc$cde", "foobar"); + Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); + } + + @Test + public void testUnbalancedQuery() throws UnsupportedEncodingException { + // too many placeholders + try { + BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd", "foobar", 0); + query.getParameterJsonWithUrlEncoded(); + Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); + } catch (RuntimeException rte) { + // expected + } + + // too many parameters + try { + BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc", "foobar", 0, 10); + query.getParameterJsonWithUrlEncoded(); + Assert.fail("Expected RuntimeException because of 
unbalanced placeholders and parameters"); + } catch (RuntimeException rte) { + // expected + } + } + + private Map readObject(String json) throws IOException { + Buffer source = new Buffer(); + source.writeString(json, Charset.forName("utf-8")); + Map params = new HashMap<>(); + JsonReader reader = JsonReader.of(source); + reader.beginObject(); + while(reader.hasNext()) { + String name = reader.nextName(); + Object value = reader.readJsonValue(); + params.put(name, value); + } + reader.endObject(); + return params; + } + + private static String decode(String str) throws UnsupportedEncodingException { + return URLDecoder.decode(str, StandardCharsets.UTF_8.toString()); + } +} From 658fa1d3dec6657961c3743e2438a4b1dfbda18e Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Thu, 15 Mar 2018 19:25:02 +0100 Subject: [PATCH 162/745] Added another test for BoundParameterQuery --- .../org/influxdb/dto/BoundParameterQueryTest.java | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java index f9f8500b7..5ef16e2c9 100644 --- a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java +++ b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java @@ -42,11 +42,20 @@ public void testPlaceHolderParsing() throws IOException { Assert.assertEquals(params.get("bcd"), 10.0); } + @Test + public void testPlaceHolderParsingWithLimitClause() throws IOException { + BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd LIMIT 10", "foobar", 0, 10); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(2, params.size()); + Assert.assertEquals(params.get("abc"), 0.0); + Assert.assertEquals(params.get("bcd"), 10.0); + } + @Test public void testIgnoreInvalidPlaceholders() throws UnsupportedEncodingException { BoundParameterQuery query = new 
BoundParameterQuery("SELECT * FROM abc WHERE a > $", "foobar"); Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); - + query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc$cde", "foobar"); Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); } @@ -71,7 +80,7 @@ public void testUnbalancedQuery() throws UnsupportedEncodingException { // expected } } - + private Map readObject(String json) throws IOException { Buffer source = new Buffer(); source.writeString(json, Charset.forName("utf-8")); @@ -86,7 +95,7 @@ private Map readObject(String json) throws IOException { reader.endObject(); return params; } - + private static String decode(String str) throws UnsupportedEncodingException { return URLDecoder.decode(str, StandardCharsets.UTF_8.toString()); } From d71f316612250d7af16596f1d3b87a4136353eeb Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Fri, 16 Mar 2018 18:40:21 +0100 Subject: [PATCH 163/745] Implemented Builder mechanism for BoundParameterQuery This Builder avoid very long constructor calls and also makes it more obvious, which value is bound to which placeholder. 
--- .../org/influxdb/dto/BoundParameterQuery.java | 68 ++++++--- .../influxdb/dto/BoundParameterQueryTest.java | 129 ++++++++++-------- 2 files changed, 122 insertions(+), 75 deletions(-) diff --git a/src/main/java/org/influxdb/dto/BoundParameterQuery.java b/src/main/java/org/influxdb/dto/BoundParameterQuery.java index cedf8b8a7..117a0e123 100644 --- a/src/main/java/org/influxdb/dto/BoundParameterQuery.java +++ b/src/main/java/org/influxdb/dto/BoundParameterQuery.java @@ -1,5 +1,6 @@ package org.influxdb.dto; +import com.squareup.moshi.JsonWriter; import java.io.IOException; import java.nio.charset.Charset; import java.util.ArrayList; @@ -9,25 +10,26 @@ import java.util.Map.Entry; import java.util.regex.Matcher; import java.util.regex.Pattern; - -import com.squareup.moshi.JsonWriter; - import okio.Buffer; public class BoundParameterQuery extends Query { - private final Object[] params; + private final Map params = new HashMap<>(); - public BoundParameterQuery(final String command, final String database, final Object...params) { + private BoundParameterQuery(final String command, final String database) { super(command, database, true); - this.params = params; + } + + public BoundParameterQuery bind(String placeholder, Object value) { + params.put(placeholder, value); + return this; } public String getParameterJsonWithUrlEncoded() { try { List placeholders = parsePlaceHolders(getCommand()); - Map parameterMap = createParameterMap(placeholders, params); - String jsonParameterObject = createJsonObject(parameterMap); + assurePlaceholdersAreBound(placeholders, params); + String jsonParameterObject = createJsonObject(params); String urlEncodedJsonParameterObject = encode(jsonParameterObject); return urlEncodedJsonParameterObject; } catch (IOException e) { @@ -35,6 +37,18 @@ public String getParameterJsonWithUrlEncoded() { } } + private void assurePlaceholdersAreBound(List placeholders, Map params) { + if (placeholders.size() != params.size()) { + throw new 
RuntimeException("Unbalanced amount of placeholders and parameters"); + } + + for (String placeholder : placeholders) { + if (params.get(placeholder) == null) { + throw new RuntimeException("Placeholder $" + placeholder + " is not bound"); + } + } + } + private String createJsonObject(final Map parameterMap) throws IOException { Buffer b = new Buffer(); JsonWriter writer = JsonWriter.of(b); @@ -56,19 +70,6 @@ private String createJsonObject(final Map parameterMap) throws I return b.readString(Charset.forName("utf-8")); } - private Map createParameterMap(final List placeholders, final Object[] params) { - if (placeholders.size() != params.length) { - throw new RuntimeException("Unbalanced amount of placeholders and parameters"); - } - - Map parameterMap = new HashMap<>(); - int index = 0; - for (String placeholder : placeholders) { - parameterMap.put(placeholder, params[index++]); - } - return parameterMap; - } - private List parsePlaceHolders(final String command) { List placeHolders = new ArrayList<>(); Pattern p = Pattern.compile("\\s+\\$(\\w+?)(?:\\s|$)"); @@ -78,4 +79,29 @@ private List parsePlaceHolders(final String command) { } return placeHolders; } + + public static class QueryBuilder { + private BoundParameterQuery query; + private String influxQL; + + public static QueryBuilder newQuery(String influxQL) { + QueryBuilder instance = new QueryBuilder(); + instance.influxQL = influxQL; + return instance; + } + + public QueryBuilder forDatabase(String database) { + query = new BoundParameterQuery(influxQL, database); + return this; + } + + public QueryBuilder bind(String placeholder, Object value) { + query.params.put(placeholder, value); + return this; + } + + public BoundParameterQuery create() { + return query; + } + } } diff --git a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java index 5ef16e2c9..fd71e3bab 100644 --- a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java +++ 
b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java @@ -8,6 +8,7 @@ import java.util.HashMap; import java.util.Map; +import org.influxdb.dto.BoundParameterQuery.QueryBuilder; import org.junit.Assert; import org.junit.jupiter.api.Test; import org.junit.platform.runner.JUnitPlatform; @@ -17,86 +18,106 @@ import okio.Buffer; - /** * Test for the BoundParameterQuery DTO. */ @RunWith(JUnitPlatform.class) public class BoundParameterQueryTest { - @Test - public void testSingleCharacterPlaceHolderParsing() throws IOException { - BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $a AND b < $b", "foobar", 0, 10); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(2, params.size()); - Assert.assertEquals(params.get("a"), 0.0); - Assert.assertEquals(params.get("b"), 10.0); - } + @Test + public void testSingleCharacterPlaceHolderParsing() throws IOException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $a AND b < $b") + .forDatabase("foobar") + .bind("a", 0) + .bind("b", 10) + .create(); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(2, params.size()); + Assert.assertEquals(params.get("a"), 0.0); + Assert.assertEquals(params.get("b"), 10.0); + } - @Test - public void testPlaceHolderParsing() throws IOException { - BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd", "foobar", 0, 10); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(2, params.size()); + @Test + public void testPlaceHolderParsing() throws IOException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd") + .forDatabase("foobar") + .bind("abc", 0) + .bind("bcd", 10) + .create(); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(2, params.size()); 
Assert.assertEquals(params.get("abc"), 0.0); Assert.assertEquals(params.get("bcd"), 10.0); - } + } - @Test + @Test public void testPlaceHolderParsingWithLimitClause() throws IOException { - BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd LIMIT 10", "foobar", 0, 10); + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd LIMIT 10") + .forDatabase("foobar") + .bind("abc", 0) + .bind("bcd", 10) + .create(); Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); Assert.assertEquals(2, params.size()); Assert.assertEquals(params.get("abc"), 0.0); Assert.assertEquals(params.get("bcd"), 10.0); } - @Test - public void testIgnoreInvalidPlaceholders() throws UnsupportedEncodingException { - BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $", "foobar"); - Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); + @Test + public void testIgnoreInvalidPlaceholders() throws UnsupportedEncodingException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $") + .forDatabase("foobar") + .create(); + Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); - query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc$cde", "foobar"); - Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); - } + query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc$cde").forDatabase("foobar").create(); + Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); + } - @Test - public void testUnbalancedQuery() throws UnsupportedEncodingException { - // too many placeholders - try { - BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd", "foobar", 0); - query.getParameterJsonWithUrlEncoded(); - Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); - } catch 
(RuntimeException rte) { - // expected - } + @Test + public void testUnbalancedQuery() throws UnsupportedEncodingException { + // too many placeholders + try { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd") + .forDatabase("foobar") + .bind("abc", 0) + .create(); + query.getParameterJsonWithUrlEncoded(); + Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); + } catch (RuntimeException rte) { + // expected + } - // too many parameters - try { - BoundParameterQuery query = new BoundParameterQuery("SELECT * FROM abc WHERE a > $abc", "foobar", 0, 10); - query.getParameterJsonWithUrlEncoded(); - Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); - } catch (RuntimeException rte) { - // expected - } - } + // too many parameters + try { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc") + .forDatabase("foobar") + .bind("abc", 0) + .bind("bcd", 10) + .create(); + query.getParameterJsonWithUrlEncoded(); + Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); + } catch (RuntimeException rte) { + // expected + } + } - private Map readObject(String json) throws IOException { - Buffer source = new Buffer(); - source.writeString(json, Charset.forName("utf-8")); - Map params = new HashMap<>(); - JsonReader reader = JsonReader.of(source); - reader.beginObject(); - while(reader.hasNext()) { + private Map readObject(String json) throws IOException { + Buffer source = new Buffer(); + source.writeString(json, Charset.forName("utf-8")); + Map params = new HashMap<>(); + JsonReader reader = JsonReader.of(source); + reader.beginObject(); + while (reader.hasNext()) { String name = reader.nextName(); Object value = reader.readJsonValue(); params.put(name, value); } - reader.endObject(); - return params; - } + reader.endObject(); + return params; + } - private static String 
decode(String str) throws UnsupportedEncodingException { + private static String decode(String str) throws UnsupportedEncodingException { return URLDecoder.decode(str, StandardCharsets.UTF_8.toString()); } } From 929cfdeb8a9242adec3a3b5162d203d91b993930 Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Sat, 17 Mar 2018 14:09:44 +0100 Subject: [PATCH 164/745] Removed unnecessary method Removed bind method from BoundParameterQuery, because this functionality is now implemented in the QueryBuilder --- src/main/java/org/influxdb/dto/BoundParameterQuery.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/main/java/org/influxdb/dto/BoundParameterQuery.java b/src/main/java/org/influxdb/dto/BoundParameterQuery.java index 117a0e123..a1344f2ca 100644 --- a/src/main/java/org/influxdb/dto/BoundParameterQuery.java +++ b/src/main/java/org/influxdb/dto/BoundParameterQuery.java @@ -20,11 +20,6 @@ private BoundParameterQuery(final String command, final String database) { super(command, database, true); } - public BoundParameterQuery bind(String placeholder, Object value) { - params.put(placeholder, value); - return this; - } - public String getParameterJsonWithUrlEncoded() { try { List placeholders = parsePlaceHolders(getCommand()); From 7eb0b5a963f4778bb083590386b0ea7d07055bd0 Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Sat, 17 Mar 2018 14:10:28 +0100 Subject: [PATCH 165/745] Added test for the different parameter types --- .../influxdb/dto/BoundParameterQueryTest.java | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java index fd71e3bab..68f29be4c 100644 --- a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java +++ b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java @@ -63,6 +63,24 @@ public void testPlaceHolderParsingWithLimitClause() throws IOException { Assert.assertEquals(params.get("bcd"), 10.0); } + 
@Test + public void testDifferentTypePlaceHolderParsing() throws IOException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE number > $number" + + " AND bool = $bool AND string = $string AND other = $object") + .forDatabase("foobar") + .bind("number", 0) + .bind("bool", true) + .bind("string", "test") + .bind("object", new Object()) + .create(); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(4, params.size()); + Assert.assertEquals(params.get("number"), 0.0); + Assert.assertEquals(params.get("bool"), true); + Assert.assertEquals(params.get("string"), "test"); + Assert.assertTrue(((String)params.get("object")).matches("java.lang.Object@[a-z0-9]+")); + } + @Test public void testIgnoreInvalidPlaceholders() throws UnsupportedEncodingException { BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $") From f03fa37c41b9488e4813260078dda79903896b3a Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Sat, 17 Mar 2018 14:39:24 +0100 Subject: [PATCH 166/745] Added equals and hashcode --- .../org/influxdb/dto/BoundParameterQuery.java | 20 ++++++++++++ .../influxdb/dto/BoundParameterQueryTest.java | 32 +++++++++++++++++++ 2 files changed, 52 insertions(+) diff --git a/src/main/java/org/influxdb/dto/BoundParameterQuery.java b/src/main/java/org/influxdb/dto/BoundParameterQuery.java index a1344f2ca..dceb28759 100644 --- a/src/main/java/org/influxdb/dto/BoundParameterQuery.java +++ b/src/main/java/org/influxdb/dto/BoundParameterQuery.java @@ -75,6 +75,26 @@ private List parsePlaceHolders(final String command) { return placeHolders; } + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + params.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + BoundParameterQuery other = (BoundParameterQuery) obj; 
+ if (!params.equals(other.params)) + return false; + return true; + } + public static class QueryBuilder { private BoundParameterQuery query; private String influxQL; diff --git a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java index 68f29be4c..be415d2c5 100644 --- a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java +++ b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java @@ -1,5 +1,7 @@ package org.influxdb.dto; +import static org.assertj.core.api.Assertions.assertThat; + import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; @@ -119,6 +121,36 @@ public void testUnbalancedQuery() throws UnsupportedEncodingException { // expected } } + + @Test + public void testEqualsAndHashCode() { + String stringA0 = "SELECT * FROM foobar WHERE a = $a"; + String stringA1 = "SELECT * FROM foobar WHERE a = $a"; + String stringB0 = "SELECT * FROM foobar WHERE b = $b"; + + Query queryA0 = QueryBuilder.newQuery(stringA0) + .forDatabase(stringA0) + .bind("a", 0) + .create(); + Query queryA1 = QueryBuilder.newQuery(stringA1) + .forDatabase(stringA1) + .bind("a", 0) + .create(); + Query queryB0 = QueryBuilder.newQuery(stringB0) + .forDatabase(stringB0) + .bind("b", 10) + .create(); +// Query queryC0 = new Query(stringB0, stringA0); + + assertThat(queryA0).isEqualTo(queryA0); + assertThat(queryA0).isEqualTo(queryA1); + assertThat(queryA0).isNotEqualTo(queryB0); + assertThat(queryA0).isNotEqualTo("foobar"); +// assertThat(queryB0).isNotEqualTo(queryC0); + + assertThat(queryA0.hashCode()).isEqualTo(queryA1.hashCode()); + assertThat(queryA0.hashCode()).isNotEqualTo(queryB0.hashCode()); + } private Map readObject(String json) throws IOException { Buffer source = new Buffer(); From 966d781714488a6984b435e5b9a30fdcaeda6712 Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Sat, 17 Mar 2018 14:43:02 +0100 Subject: [PATCH 167/745] Code cleanup Fixed 
checkstyle issues and formatted according to project standard (hopefully) --- .../org/influxdb/dto/BoundParameterQuery.java | 189 ++++++------ .../influxdb/dto/BoundParameterQueryTest.java | 276 +++++++++--------- 2 files changed, 233 insertions(+), 232 deletions(-) diff --git a/src/main/java/org/influxdb/dto/BoundParameterQuery.java b/src/main/java/org/influxdb/dto/BoundParameterQuery.java index dceb28759..6c14eac6b 100644 --- a/src/main/java/org/influxdb/dto/BoundParameterQuery.java +++ b/src/main/java/org/influxdb/dto/BoundParameterQuery.java @@ -12,111 +12,114 @@ import java.util.regex.Pattern; import okio.Buffer; -public class BoundParameterQuery extends Query { - - private final Map params = new HashMap<>(); - - private BoundParameterQuery(final String command, final String database) { - super(command, database, true); +public final class BoundParameterQuery extends Query { + + private final Map params = new HashMap<>(); + + private BoundParameterQuery(final String command, final String database) { + super(command, database, true); + } + + public String getParameterJsonWithUrlEncoded() { + try { + List placeholders = parsePlaceHolders(getCommand()); + assurePlaceholdersAreBound(placeholders, params); + String jsonParameterObject = createJsonObject(params); + String urlEncodedJsonParameterObject = encode(jsonParameterObject); + return urlEncodedJsonParameterObject; + } catch (IOException e) { + throw new RuntimeException("Couldn't create parameter JSON object", e); } + } - public String getParameterJsonWithUrlEncoded() { - try { - List placeholders = parsePlaceHolders(getCommand()); - assurePlaceholdersAreBound(placeholders, params); - String jsonParameterObject = createJsonObject(params); - String urlEncodedJsonParameterObject = encode(jsonParameterObject); - return urlEncodedJsonParameterObject; - } catch (IOException e) { - throw new RuntimeException("Couldn't create parameter JSON object", e); - } + private void assurePlaceholdersAreBound(final List 
placeholders, final Map params) { + if (placeholders.size() != params.size()) { + throw new RuntimeException("Unbalanced amount of placeholders and parameters"); } - private void assurePlaceholdersAreBound(List placeholders, Map params) { - if (placeholders.size() != params.size()) { - throw new RuntimeException("Unbalanced amount of placeholders and parameters"); - } - - for (String placeholder : placeholders) { - if (params.get(placeholder) == null) { - throw new RuntimeException("Placeholder $" + placeholder + " is not bound"); - } - } + for (String placeholder : placeholders) { + if (params.get(placeholder) == null) { + throw new RuntimeException("Placeholder $" + placeholder + " is not bound"); + } } - - private String createJsonObject(final Map parameterMap) throws IOException { - Buffer b = new Buffer(); - JsonWriter writer = JsonWriter.of(b); - writer.beginObject(); - for (Entry pair : parameterMap.entrySet()) { - String name = pair.getKey(); - Object value = pair.getValue(); - if (value instanceof Number) { - writer.name(name).value((Number) value); - } else if (value instanceof String) { - writer.name(name).value((String) value); - } else if (value instanceof Boolean) { - writer.name(name).value((Boolean) value); - } else { - writer.name(name).value(value.toString()); - } - } - writer.endObject(); - return b.readString(Charset.forName("utf-8")); + } + + private String createJsonObject(final Map parameterMap) throws IOException { + Buffer b = new Buffer(); + JsonWriter writer = JsonWriter.of(b); + writer.beginObject(); + for (Entry pair : parameterMap.entrySet()) { + String name = pair.getKey(); + Object value = pair.getValue(); + if (value instanceof Number) { + writer.name(name).value((Number) value); + } else if (value instanceof String) { + writer.name(name).value((String) value); + } else if (value instanceof Boolean) { + writer.name(name).value((Boolean) value); + } else { + writer.name(name).value(value.toString()); + } + } + writer.endObject(); + 
return b.readString(Charset.forName("utf-8")); + } + + private List parsePlaceHolders(final String command) { + List placeHolders = new ArrayList<>(); + Pattern p = Pattern.compile("\\s+\\$(\\w+?)(?:\\s|$)"); + Matcher m = p.matcher(getCommand()); + while (m.find()) { + placeHolders.add(m.group(1)); } + return placeHolders; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + params.hashCode(); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (!super.equals(obj)) { + return false; + } + BoundParameterQuery other = (BoundParameterQuery) obj; + if (!params.equals(other.params)) { + return false; + } + return true; + } + + public static class QueryBuilder { + private BoundParameterQuery query; + private String influxQL; - private List parsePlaceHolders(final String command) { - List placeHolders = new ArrayList<>(); - Pattern p = Pattern.compile("\\s+\\$(\\w+?)(?:\\s|$)"); - Matcher m = p.matcher(getCommand()); - while (m.find()) { - placeHolders.add(m.group(1)); - } - return placeHolders; + public static QueryBuilder newQuery(final String influxQL) { + QueryBuilder instance = new QueryBuilder(); + instance.influxQL = influxQL; + return instance; } - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + params.hashCode(); - return result; + public QueryBuilder forDatabase(final String database) { + query = new BoundParameterQuery(influxQL, database); + return this; } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (!super.equals(obj)) - return false; - BoundParameterQuery other = (BoundParameterQuery) obj; - if (!params.equals(other.params)) - return false; - return true; + public QueryBuilder bind(final String placeholder, final Object value) { + query.params.put(placeholder, value); + return this; } - public 
static class QueryBuilder { - private BoundParameterQuery query; - private String influxQL; - - public static QueryBuilder newQuery(String influxQL) { - QueryBuilder instance = new QueryBuilder(); - instance.influxQL = influxQL; - return instance; - } - - public QueryBuilder forDatabase(String database) { - query = new BoundParameterQuery(influxQL, database); - return this; - } - - public QueryBuilder bind(String placeholder, Object value) { - query.params.put(placeholder, value); - return this; - } - - public BoundParameterQuery create() { - return query; - } + public BoundParameterQuery create() { + return query; } + } } diff --git a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java index be415d2c5..c25ca1d39 100644 --- a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java +++ b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java @@ -26,148 +26,146 @@ @RunWith(JUnitPlatform.class) public class BoundParameterQueryTest { - @Test - public void testSingleCharacterPlaceHolderParsing() throws IOException { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $a AND b < $b") - .forDatabase("foobar") - .bind("a", 0) - .bind("b", 10) - .create(); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(2, params.size()); - Assert.assertEquals(params.get("a"), 0.0); - Assert.assertEquals(params.get("b"), 10.0); + @Test + public void testSingleCharacterPlaceHolderParsing() throws IOException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $a AND b < $b") + .forDatabase("foobar") + .bind("a", 0) + .bind("b", 10) + .create(); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(2, params.size()); + Assert.assertEquals(params.get("a"), 0.0); + Assert.assertEquals(params.get("b"), 10.0); + } + + @Test + public void testPlaceHolderParsing() throws 
IOException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd") + .forDatabase("foobar") + .bind("abc", 0) + .bind("bcd", 10) + .create(); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(2, params.size()); + Assert.assertEquals(params.get("abc"), 0.0); + Assert.assertEquals(params.get("bcd"), 10.0); + } + + @Test + public void testPlaceHolderParsingWithLimitClause() throws IOException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd LIMIT 10") + .forDatabase("foobar") + .bind("abc", 0) + .bind("bcd", 10) + .create(); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(2, params.size()); + Assert.assertEquals(params.get("abc"), 0.0); + Assert.assertEquals(params.get("bcd"), 10.0); + } + + @Test + public void testDifferentTypePlaceHolderParsing() throws IOException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE number > $number" + + " AND bool = $bool AND string = $string AND other = $object") + .forDatabase("foobar") + .bind("number", 0) + .bind("bool", true) + .bind("string", "test") + .bind("object", new Object()) + .create(); + Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(4, params.size()); + Assert.assertEquals(params.get("number"), 0.0); + Assert.assertEquals(params.get("bool"), true); + Assert.assertEquals(params.get("string"), "test"); + Assert.assertTrue(((String)params.get("object")).matches("java.lang.Object@[a-z0-9]+")); + } + + @Test + public void testIgnoreInvalidPlaceholders() throws UnsupportedEncodingException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $") + .forDatabase("foobar") + .create(); + Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); + + query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > 
$abc$cde").forDatabase("foobar").create(); + Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); + } + + @Test + public void testUnbalancedQuery() throws UnsupportedEncodingException { + // too many placeholders + try { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd") + .forDatabase("foobar") + .bind("abc", 0) + .create(); + query.getParameterJsonWithUrlEncoded(); + Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); + } catch (RuntimeException rte) { + // expected } - @Test - public void testPlaceHolderParsing() throws IOException { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd") - .forDatabase("foobar") - .bind("abc", 0) - .bind("bcd", 10) - .create(); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(2, params.size()); - Assert.assertEquals(params.get("abc"), 0.0); - Assert.assertEquals(params.get("bcd"), 10.0); + // too many parameters + try { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc") + .forDatabase("foobar") + .bind("abc", 0) + .bind("bcd", 10) + .create(); + query.getParameterJsonWithUrlEncoded(); + Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); + } catch (RuntimeException rte) { + // expected } - - @Test - public void testPlaceHolderParsingWithLimitClause() throws IOException { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd LIMIT 10") - .forDatabase("foobar") - .bind("abc", 0) - .bind("bcd", 10) - .create(); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(2, params.size()); - Assert.assertEquals(params.get("abc"), 0.0); - Assert.assertEquals(params.get("bcd"), 10.0); - } - - @Test - public void testDifferentTypePlaceHolderParsing() throws IOException { - 
BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE number > $number" - + " AND bool = $bool AND string = $string AND other = $object") - .forDatabase("foobar") - .bind("number", 0) - .bind("bool", true) - .bind("string", "test") - .bind("object", new Object()) - .create(); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(4, params.size()); - Assert.assertEquals(params.get("number"), 0.0); - Assert.assertEquals(params.get("bool"), true); - Assert.assertEquals(params.get("string"), "test"); - Assert.assertTrue(((String)params.get("object")).matches("java.lang.Object@[a-z0-9]+")); - } - - @Test - public void testIgnoreInvalidPlaceholders() throws UnsupportedEncodingException { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $") - .forDatabase("foobar") - .create(); - Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); - - query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc$cde").forDatabase("foobar").create(); - Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); + } + + @Test + public void testEqualsAndHashCode() { + String stringA0 = "SELECT * FROM foobar WHERE a = $a"; + String stringA1 = "SELECT * FROM foobar WHERE a = $a"; + String stringB0 = "SELECT * FROM foobar WHERE b = $b"; + + Query queryA0 = QueryBuilder.newQuery(stringA0) + .forDatabase(stringA0) + .bind("a", 0) + .create(); + Query queryA1 = QueryBuilder.newQuery(stringA1) + .forDatabase(stringA1) + .bind("a", 0) + .create(); + Query queryB0 = QueryBuilder.newQuery(stringB0) + .forDatabase(stringB0) + .bind("b", 10) + .create(); + + assertThat(queryA0).isEqualTo(queryA0); + assertThat(queryA0).isEqualTo(queryA1); + assertThat(queryA0).isNotEqualTo(queryB0); + assertThat(queryA0).isNotEqualTo("foobar"); + + assertThat(queryA0.hashCode()).isEqualTo(queryA1.hashCode()); + assertThat(queryA0.hashCode()).isNotEqualTo(queryB0.hashCode()); + } + + 
private Map readObject(String json) throws IOException { + Buffer source = new Buffer(); + source.writeString(json, Charset.forName("utf-8")); + Map params = new HashMap<>(); + JsonReader reader = JsonReader.of(source); + reader.beginObject(); + while (reader.hasNext()) { + String name = reader.nextName(); + Object value = reader.readJsonValue(); + params.put(name, value); } + reader.endObject(); + return params; + } - @Test - public void testUnbalancedQuery() throws UnsupportedEncodingException { - // too many placeholders - try { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd") - .forDatabase("foobar") - .bind("abc", 0) - .create(); - query.getParameterJsonWithUrlEncoded(); - Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); - } catch (RuntimeException rte) { - // expected - } - - // too many parameters - try { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc") - .forDatabase("foobar") - .bind("abc", 0) - .bind("bcd", 10) - .create(); - query.getParameterJsonWithUrlEncoded(); - Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); - } catch (RuntimeException rte) { - // expected - } - } - - @Test - public void testEqualsAndHashCode() { - String stringA0 = "SELECT * FROM foobar WHERE a = $a"; - String stringA1 = "SELECT * FROM foobar WHERE a = $a"; - String stringB0 = "SELECT * FROM foobar WHERE b = $b"; - - Query queryA0 = QueryBuilder.newQuery(stringA0) - .forDatabase(stringA0) - .bind("a", 0) - .create(); - Query queryA1 = QueryBuilder.newQuery(stringA1) - .forDatabase(stringA1) - .bind("a", 0) - .create(); - Query queryB0 = QueryBuilder.newQuery(stringB0) - .forDatabase(stringB0) - .bind("b", 10) - .create(); -// Query queryC0 = new Query(stringB0, stringA0); - - assertThat(queryA0).isEqualTo(queryA0); - assertThat(queryA0).isEqualTo(queryA1); - assertThat(queryA0).isNotEqualTo(queryB0); - 
assertThat(queryA0).isNotEqualTo("foobar"); -// assertThat(queryB0).isNotEqualTo(queryC0); - - assertThat(queryA0.hashCode()).isEqualTo(queryA1.hashCode()); - assertThat(queryA0.hashCode()).isNotEqualTo(queryB0.hashCode()); - } - - private Map readObject(String json) throws IOException { - Buffer source = new Buffer(); - source.writeString(json, Charset.forName("utf-8")); - Map params = new HashMap<>(); - JsonReader reader = JsonReader.of(source); - reader.beginObject(); - while (reader.hasNext()) { - String name = reader.nextName(); - Object value = reader.readJsonValue(); - params.put(name, value); - } - reader.endObject(); - return params; - } - - private static String decode(String str) throws UnsupportedEncodingException { - return URLDecoder.decode(str, StandardCharsets.UTF_8.toString()); - } + private static String decode(String str) throws UnsupportedEncodingException { + return URLDecoder.decode(str, StandardCharsets.UTF_8.toString()); + } } From be08bcdbc9951d3441e8e7f06e799c4915c111f9 Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Sat, 17 Mar 2018 15:24:16 +0100 Subject: [PATCH 168/745] Added test for BoundParameterQuery Added a test which executes a BoundParameterQuery against an actual database. 
--- src/test/java/org/influxdb/InfluxDBTest.java | 31 ++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index c9b1eee21..85c4ebd3a 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -2,10 +2,12 @@ import org.influxdb.InfluxDB.LogLevel; import org.influxdb.dto.BatchPoints; +import org.influxdb.dto.BoundParameterQuery.QueryBuilder; import org.influxdb.dto.Point; import org.influxdb.dto.Pong; import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; +import org.influxdb.dto.QueryResult.Series; import org.influxdb.impl.InfluxDBImpl; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; @@ -89,6 +91,35 @@ public void testQuery() { this.influxDB.query(new Query("DROP DATABASE mydb2", "mydb")); } + @Test + public void testBoundParameterQuery() { + // set up + Point point = Point + .measurement("cpu") + .tag("atag", "test") + .addField("idle", 90L) + .addField("usertime", 9L) + .addField("system", 1L) + .build(); + this.influxDB.setDatabase(UDP_DATABASE); + this.influxDB.write(point); + + // test + Query query = QueryBuilder.newQuery("SELECT * FROM cpu WHERE atag = $atag") + .forDatabase(UDP_DATABASE) + .bind("atag", "test") + .create(); + QueryResult result = this.influxDB.query(query); + Assertions.assertTrue(result.getResults().get(0).getSeries().size() == 1); + Series series = result.getResults().get(0).getSeries().get(0); + Assertions.assertTrue(series.getValues().size() == 1); + + result = this.influxDB.query(query, TimeUnit.SECONDS); + Assertions.assertTrue(result.getResults().get(0).getSeries().size() == 1); + series = result.getResults().get(0).getSeries().get(0); + Assertions.assertTrue(series.getValues().size() == 1); + } + /** * Tests for callback query. 
*/ From 58cd9eb26f2d941d75c224c30fd43527bad47f33 Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Sat, 17 Mar 2018 16:55:17 +0100 Subject: [PATCH 169/745] Improved test coverage for BoundParameterQuery --- src/test/java/org/influxdb/InfluxDBTest.java | 16 +++++++++++++++- .../influxdb/dto/BoundParameterQueryTest.java | 17 +++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 85c4ebd3a..7bd817e85 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -92,7 +92,7 @@ public void testQuery() { } @Test - public void testBoundParameterQuery() { + public void testBoundParameterQuery() throws InterruptedException { // set up Point point = Point .measurement("cpu") @@ -118,6 +118,20 @@ public void testBoundParameterQuery() { Assertions.assertTrue(result.getResults().get(0).getSeries().size() == 1); series = result.getResults().get(0).getSeries().get(0); Assertions.assertTrue(series.getValues().size() == 1); + + Object waitForTestresults = new Object(); + Consumer check = (queryResult) -> { + Assertions.assertTrue(queryResult.getResults().get(0).getSeries().size() == 1); + Series s = queryResult.getResults().get(0).getSeries().get(0); + Assertions.assertTrue(s.getValues().size() == 1); + synchronized (waitForTestresults) { + waitForTestresults.notifyAll(); + } + }; + this.influxDB.query(query, 10, check); + synchronized (waitForTestresults) { + waitForTestresults.wait(2000); + } } /** diff --git a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java index c25ca1d39..a5f6b9729 100644 --- a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java +++ b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java @@ -120,6 +120,18 @@ public void testUnbalancedQuery() throws UnsupportedEncodingException { } catch 
(RuntimeException rte) { // expected } + + // unbound placeholder + try { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc") + .forDatabase("foobar") + .bind("bcd", 10) + .create(); + query.getParameterJsonWithUrlEncoded(); + Assert.fail("Expected RuntimeException because of unbound placeholder"); + } catch (RuntimeException rte) { + // expected + } } @Test @@ -136,6 +148,10 @@ public void testEqualsAndHashCode() { .forDatabase(stringA1) .bind("a", 0) .create(); + Query queryA2 = QueryBuilder.newQuery(stringA1) + .forDatabase(stringA1) + .bind("a", 10) + .create(); Query queryB0 = QueryBuilder.newQuery(stringB0) .forDatabase(stringB0) .bind("b", 10) @@ -143,6 +159,7 @@ public void testEqualsAndHashCode() { assertThat(queryA0).isEqualTo(queryA0); assertThat(queryA0).isEqualTo(queryA1); + assertThat(queryA0).isNotEqualTo(queryA2); assertThat(queryA0).isNotEqualTo(queryB0); assertThat(queryA0).isNotEqualTo("foobar"); From f761e6139ac5d84b3f294acd90070fea513a9cb6 Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Sun, 18 Mar 2018 14:32:50 +0100 Subject: [PATCH 170/745] Removed unnecessary placeholder checks The checks in BoundParameterQuery have been removed, because InfluxDB does that already and returns appropriate error messages in case of an invalid request. 
--- .../org/influxdb/dto/BoundParameterQuery.java | 38 +---- .../influxdb/dto/BoundParameterQueryTest.java | 145 +++--------------- 2 files changed, 31 insertions(+), 152 deletions(-) diff --git a/src/main/java/org/influxdb/dto/BoundParameterQuery.java b/src/main/java/org/influxdb/dto/BoundParameterQuery.java index 6c14eac6b..a70b9524b 100644 --- a/src/main/java/org/influxdb/dto/BoundParameterQuery.java +++ b/src/main/java/org/influxdb/dto/BoundParameterQuery.java @@ -3,13 +3,12 @@ import com.squareup.moshi.JsonWriter; import java.io.IOException; import java.nio.charset.Charset; -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.regex.Matcher; -import java.util.regex.Pattern; + +import org.influxdb.InfluxDBIOException; + import okio.Buffer; public final class BoundParameterQuery extends Query { @@ -22,25 +21,11 @@ private BoundParameterQuery(final String command, final String database) { public String getParameterJsonWithUrlEncoded() { try { - List placeholders = parsePlaceHolders(getCommand()); - assurePlaceholdersAreBound(placeholders, params); String jsonParameterObject = createJsonObject(params); String urlEncodedJsonParameterObject = encode(jsonParameterObject); return urlEncodedJsonParameterObject; } catch (IOException e) { - throw new RuntimeException("Couldn't create parameter JSON object", e); - } - } - - private void assurePlaceholdersAreBound(final List placeholders, final Map params) { - if (placeholders.size() != params.size()) { - throw new RuntimeException("Unbalanced amount of placeholders and parameters"); - } - - for (String placeholder : placeholders) { - if (params.get(placeholder) == null) { - throw new RuntimeException("Placeholder $" + placeholder + " is not bound"); - } + throw new InfluxDBIOException(e); } } @@ -52,29 +37,20 @@ private String createJsonObject(final Map parameterMap) throws I String name = pair.getKey(); Object value = 
pair.getValue(); if (value instanceof Number) { - writer.name(name).value((Number) value); + Number number = (Number) value; + writer.name(name).value(number); } else if (value instanceof String) { writer.name(name).value((String) value); } else if (value instanceof Boolean) { writer.name(name).value((Boolean) value); } else { - writer.name(name).value(value.toString()); + writer.name(name).value(String.valueOf(value)); } } writer.endObject(); return b.readString(Charset.forName("utf-8")); } - private List parsePlaceHolders(final String command) { - List placeHolders = new ArrayList<>(); - Pattern p = Pattern.compile("\\s+\\$(\\w+?)(?:\\s|$)"); - Matcher m = p.matcher(getCommand()); - while (m.find()) { - placeHolders.add(m.group(1)); - } - return placeHolders; - } - @Override public int hashCode() { final int prime = 31; diff --git a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java index a5f6b9729..3ab185272 100644 --- a/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java +++ b/src/test/java/org/influxdb/dto/BoundParameterQueryTest.java @@ -5,10 +5,7 @@ import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; -import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; import org.influxdb.dto.BoundParameterQuery.QueryBuilder; import org.junit.Assert; @@ -16,9 +13,8 @@ import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; -import com.squareup.moshi.JsonReader; - -import okio.Buffer; +import com.squareup.moshi.JsonAdapter; +import com.squareup.moshi.Moshi; /** * Test for the BoundParameterQuery DTO. 
@@ -27,111 +23,25 @@ public class BoundParameterQueryTest { @Test - public void testSingleCharacterPlaceHolderParsing() throws IOException { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $a AND b < $b") - .forDatabase("foobar") - .bind("a", 0) - .bind("b", 10) - .create(); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(2, params.size()); - Assert.assertEquals(params.get("a"), 0.0); - Assert.assertEquals(params.get("b"), 10.0); - } - - @Test - public void testPlaceHolderParsing() throws IOException { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd") - .forDatabase("foobar") - .bind("abc", 0) - .bind("bcd", 10) - .create(); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(2, params.size()); - Assert.assertEquals(params.get("abc"), 0.0); - Assert.assertEquals(params.get("bcd"), 10.0); - } - - @Test - public void testPlaceHolderParsingWithLimitClause() throws IOException { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd LIMIT 10") - .forDatabase("foobar") - .bind("abc", 0) - .bind("bcd", 10) - .create(); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(2, params.size()); - Assert.assertEquals(params.get("abc"), 0.0); - Assert.assertEquals(params.get("bcd"), 10.0); - } - - @Test - public void testDifferentTypePlaceHolderParsing() throws IOException { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE number > $number" - + " AND bool = $bool AND string = $string AND other = $object") + public void testGetParameterJsonWithUrlEncoded() throws IOException { + BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE integer > $i" + + "AND double = $d AND bool = $bool AND string = $string AND other = $object") .forDatabase("foobar") - .bind("number", 0) + 
.bind("i", 0) + .bind("d", 1.0) .bind("bool", true) .bind("string", "test") .bind("object", new Object()) .create(); - Map params = readObject(decode(query.getParameterJsonWithUrlEncoded())); - Assert.assertEquals(4, params.size()); - Assert.assertEquals(params.get("number"), 0.0); - Assert.assertEquals(params.get("bool"), true); - Assert.assertEquals(params.get("string"), "test"); - Assert.assertTrue(((String)params.get("object")).matches("java.lang.Object@[a-z0-9]+")); - } - - @Test - public void testIgnoreInvalidPlaceholders() throws UnsupportedEncodingException { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $") - .forDatabase("foobar") - .create(); - Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); - - query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc$cde").forDatabase("foobar").create(); - Assert.assertEquals(decode(query.getParameterJsonWithUrlEncoded()), "{}"); - } - - @Test - public void testUnbalancedQuery() throws UnsupportedEncodingException { - // too many placeholders - try { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc AND b < $bcd") - .forDatabase("foobar") - .bind("abc", 0) - .create(); - query.getParameterJsonWithUrlEncoded(); - Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); - } catch (RuntimeException rte) { - // expected - } - - // too many parameters - try { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc") - .forDatabase("foobar") - .bind("abc", 0) - .bind("bcd", 10) - .create(); - query.getParameterJsonWithUrlEncoded(); - Assert.fail("Expected RuntimeException because of unbalanced placeholders and parameters"); - } catch (RuntimeException rte) { - // expected - } - - // unbound placeholder - try { - BoundParameterQuery query = QueryBuilder.newQuery("SELECT * FROM abc WHERE a > $abc") - .forDatabase("foobar") - .bind("bcd", 10) - .create(); - 
query.getParameterJsonWithUrlEncoded(); - Assert.fail("Expected RuntimeException because of unbound placeholder"); - } catch (RuntimeException rte) { - // expected - } + + Moshi moshi = new Moshi.Builder().build(); + JsonAdapter adapter = moshi.adapter(Point.class); + Point point = adapter.fromJson(decode(query.getParameterJsonWithUrlEncoded())); + Assert.assertEquals(0, point.i); + Assert.assertEquals(1.0, point.d, 0.0); + Assert.assertEquals(true, point.bool); + Assert.assertEquals("test", point.string); + Assert.assertTrue(point.object.matches("java.lang.Object@[a-z0-9]+")); } @Test @@ -167,22 +77,15 @@ public void testEqualsAndHashCode() { assertThat(queryA0.hashCode()).isNotEqualTo(queryB0.hashCode()); } - private Map readObject(String json) throws IOException { - Buffer source = new Buffer(); - source.writeString(json, Charset.forName("utf-8")); - Map params = new HashMap<>(); - JsonReader reader = JsonReader.of(source); - reader.beginObject(); - while (reader.hasNext()) { - String name = reader.nextName(); - Object value = reader.readJsonValue(); - params.put(name, value); - } - reader.endObject(); - return params; - } - private static String decode(String str) throws UnsupportedEncodingException { return URLDecoder.decode(str, StandardCharsets.UTF_8.toString()); } + + private static class Point { + int i; + double d; + String string; + Boolean bool; + String object; + } } From 70f8c1452c168cf9416f5f18f43081179a596405 Mon Sep 17 00:00:00 2001 From: Fernando Machado Date: Tue, 20 Mar 2018 16:38:03 +0100 Subject: [PATCH 171/745] Fixed setAccessible modif. 
on POJOs and changes are not reversed anymore --- src/main/java/org/influxdb/impl/InfluxDBResultMapper.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java index 2188bcd18..54f34721c 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java +++ b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java @@ -203,9 +203,10 @@ void setFieldValue(final T object, final Field field, final Object value) return; } Class fieldType = field.getType(); - boolean oldAccessibleState = field.isAccessible(); try { - field.setAccessible(true); + if (!field.isAccessible()) { + field.setAccessible(true); + } if (fieldValueModified(fieldType, field, object, value) || fieldValueForPrimitivesModified(fieldType, field, object, value) || fieldValueForPrimitiveWrappersModified(fieldType, field, object, value)) { @@ -219,8 +220,6 @@ void setFieldValue(final T object, final Field field, final Object value) + "The correct type is '%s' (current field value: '%s')."; throw new InfluxDBMapperException( String.format(msg, object.getClass().getName(), field.getName(), value.getClass().getName(), value)); - } finally { - field.setAccessible(oldAccessibleState); } } From 2fc928ece4e389efc45fa19eb6dc130773c19bef Mon Sep 17 00:00:00 2001 From: Henrik Niehaus Date: Tue, 20 Mar 2018 19:22:09 +0100 Subject: [PATCH 172/745] Added paragraph for parameter binding in queries in README Added short paragraph in README, which describes the parameter binding for queries. Also added an entry for the parameter binding pull request to the changelog. 
--- CHANGELOG.md | 6 ++++++ README.md | 16 ++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2cb32802d..ecfbffb17 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 2.10 [unreleased] + +### Features + +- Support for parameter binding in queries ("prepared statements") [PR #429](https://github.com/influxdata/influxdb-java/pull/429) + ## 2.9 [2018-02-27] ### Features diff --git a/README.md b/README.md index 6a41e89fa..8365fce41 100644 --- a/README.md +++ b/README.md @@ -263,6 +263,22 @@ this.influxDB.query(new Query("SELECT idle FROM cpu", dbName), queryResult -> { }); ``` +#### Query using parameter binding ("prepared statements", version 2.10+ required) + +If your Query is based on user input, it is good practice to use parameter binding to avoid [injection attacks](https://en.wikipedia.org/wiki/SQL_injection). +You can create queries with parameter binding with the help of the QueryBuilder: + +```java +Query query = QueryBuilder.newQuery("SELECT * FROM cpu WHERE idle > $idle AND system > $system") + .forDatabase(dbName) + .bind("idle", 90) + .bind("system", 5) + .create(); +QueryResult results = influxDB.query(query); +``` + +The values of the bind() calls are bound to the placeholders in the query ($idle, $system). 
+ #### Batch flush interval jittering (version 2.9+ required) When using large number of influxdb-java clients against a single server it may happen that all the clients From 9e16b5cab2a466f7025510e215ee036c2a3f8cba Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 22 Mar 2018 09:56:37 +0100 Subject: [PATCH 173/745] Update retrofit from 2.3.0 -> 2.4.0 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index cbea0c021..6a44302f7 100644 --- a/pom.xml +++ b/pom.xml @@ -248,12 +248,12 @@ com.squareup.retrofit2 retrofit - 2.3.0 + 2.4.0 com.squareup.retrofit2 converter-moshi - 2.3.0 + 2.4.0 From c41b589f67858475fa943287f950dd0e0f9ac4d1 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 22 Mar 2018 10:53:44 +0100 Subject: [PATCH 174/745] Use java10 instead of java9 because this will superseed it, jacoco update from 0.8.0 -> 0.8.1 because only this works with java10 --- compile-and-test.sh | 2 +- pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compile-and-test.sh b/compile-and-test.sh index 8b10906d0..99b2ba4d6 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -6,7 +6,7 @@ set -e INFLUXDB_VERSIONS="1.5 1.4 1.3 1.2 1.1" -JAVA_VERSIONS="3-jdk-8-alpine 3-jdk-9-slim" +JAVA_VERSIONS="3-jdk-8-alpine 3-jdk-10-slim" for java_version in ${JAVA_VERSIONS} diff --git a/pom.xml b/pom.xml index 6a44302f7..9232e5968 100644 --- a/pom.xml +++ b/pom.xml @@ -168,7 +168,7 @@ org.jacoco jacoco-maven-plugin - 0.8.0 + 0.8.1 From 8721f99e829cc9ff91f20d335ddbbae6e59abc3b Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 24 Mar 2018 11:31:44 +0100 Subject: [PATCH 175/745] Speed up tracis ci build be creating a matrix of java version and influxdb version --- .travis.yml | 22 +++++++++++-------- compile-and-test.sh | 51 ++++++++++++++++++++------------------------- 2 files changed, 36 insertions(+), 37 deletions(-) diff --git a/.travis.yml b/.travis.yml index 434bf711d..b768d1b22 100644 --- 
a/.travis.yml +++ b/.travis.yml @@ -1,16 +1,20 @@ -language: java sudo: required -jdk: - - oraclejdk8 - -addons: - apt: - packages: - - oracle-java8-installer # Updates JDK 8 to the latest available. - services: - docker + +env: + - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.5 + - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.4 + - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.3 + - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.2 + - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.1 + - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.5 + - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.4 + - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.3 + - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.2 + - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.1 + script: ./compile-and-test.sh after_success: diff --git a/compile-and-test.sh b/compile-and-test.sh index 99b2ba4d6..7523f6e8b 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -4,36 +4,31 @@ # set -e -INFLUXDB_VERSIONS="1.5 1.4 1.3 1.2 1.1" +DEFAULT_INFLUXDB_VERSION="1.5" +DEFAULT_MAVEN_JAVA_VERSION="3-jdk-10-slim" -JAVA_VERSIONS="3-jdk-8-alpine 3-jdk-10-slim" +INFLUXDB_VERSION="${INFLUXDB_VERSION:-$DEFAULT_INFLUXDB_VERSION}" +MAVEN_JAVA_VERSION="${MAVEN_JAVA_VERSION:-$DEFAULT_MAVEN_JAVA_VERSION}" +echo "Run tests with maven:${MAVEN_JAVA_VERSION} on onfluxdb-${INFLUXDB_VERSION}" -for java_version in ${JAVA_VERSIONS} -do - echo "Run tests with maven:${java_version}" -for version in ${INFLUXDB_VERSIONS} -do - echo "Tesing againts influxdb ${version}" - docker kill influxdb || true - docker rm influxdb || true - docker pull influxdb:${version}-alpine || true - docker run \ - --detach \ - --name influxdb \ - --publish 8086:8086 \ - --publish 8089:8089/udp \ - --volume ${PWD}/influxdb.conf:/etc/influxdb/influxdb.conf \ - influxdb:${version}-alpine +docker kill influxdb || true +docker rm influxdb || true +docker pull influxdb:${version}-alpine || true +docker run \ + 
--detach \ + --name influxdb \ + --publish 8086:8086 \ + --publish 8089:8089/udp \ + --volume ${PWD}/influxdb.conf:/etc/influxdb/influxdb.conf \ + influxdb:${INFLUXDB_VERSION}-alpine - docker run -it --rm \ - --volume $PWD:/usr/src/mymaven \ - --volume $PWD/.m2:/root/.m2 \ - --workdir /usr/src/mymaven \ - --link=influxdb \ - --env INFLUXDB_IP=influxdb \ - maven:${java_version} mvn clean install +docker run -it --rm \ + --volume $PWD:/usr/src/mymaven \ + --volume $PWD/.m2:/root/.m2 \ + --workdir /usr/src/mymaven \ + --link=influxdb \ + --env INFLUXDB_IP=influxdb \ + maven:${MAVEN_JAVA_VERSION} mvn clean install - docker kill influxdb || true -done -done +docker kill influxdb || true From 23776a8866dafc4361e23925fe984f1a58b806c2 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 24 Mar 2018 11:49:10 +0100 Subject: [PATCH 176/745] Further speed up, only test the most recent influx with the old java8 --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index b768d1b22..2117c828c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,19 +1,19 @@ +language: java + sudo: required services: - docker +# We test against all influxdb versions with the most actual JDK. +# Test only the most recent influxdb version with JDK8 which will be legacy soon. 
env: - - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.5 - - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.4 - - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.3 - - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.2 - - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.1 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.5 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.4 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.3 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.2 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.1 + - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.5 script: ./compile-and-test.sh From 6953795cbe2a56762e55482fbb9d5b51c4a23d37 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 24 Mar 2018 11:49:45 +0100 Subject: [PATCH 177/745] Reduce the amount of points written in the performance test by factor 10 and therefor gain even more test speed --- src/test/java/org/influxdb/PerformanceTests.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/test/java/org/influxdb/PerformanceTests.java b/src/test/java/org/influxdb/PerformanceTests.java index 1fd0a0444..65055c507 100644 --- a/src/test/java/org/influxdb/PerformanceTests.java +++ b/src/test/java/org/influxdb/PerformanceTests.java @@ -171,11 +171,11 @@ protected void check(InvocationOnMock invocation) { } }; - answer.params.put("startTime", System.currentTimeMillis() + 80000); + answer.params.put("startTime", System.currentTimeMillis() + 8000); doAnswer(answer).when(spy).write(any(BatchPoints.class)); spy.createDatabase(dbName); - BatchOptions batchOptions = BatchOptions.DEFAULTS.actions(100000).flushDuration(20000).bufferLimit(3000000).exceptionHandler((points, throwable) -> { + BatchOptions batchOptions = BatchOptions.DEFAULTS.actions(10000).flushDuration(2000).bufferLimit(300000).exceptionHandler((points, throwable) -> { System.out.println("+++++++++++ exceptionHandler +++++++++++"); System.out.println(throwable); 
System.out.println("++++++++++++++++++++++++++++++++++++++++"); @@ -185,17 +185,17 @@ protected void check(InvocationOnMock invocation) { spy.enableBatch(batchOptions); String rp = TestUtils.defaultRetentionPolicy(spy.version()); - for (long i = 0; i < 400000; i++) { + for (long i = 0; i < 40000; i++) { Point point = Point.measurement("s").time(i, TimeUnit.MILLISECONDS).addField("v", 1.0).build(); spy.write(dbName, rp, point); } System.out.println("sleep"); - Thread.sleep(120000); + Thread.sleep(12000); try { QueryResult result = spy.query(new Query("select count(v) from s", dbName)); double d = Double.parseDouble(result.getResults().get(0).getSeries().get(0).getValues().get(0).get(1).toString()); - Assertions.assertEquals(400000, d); + Assertions.assertEquals(40000, d); } catch (Exception e) { System.out.println("+++++++++++++++++count() +++++++++++++++++++++"); System.out.println(e); From 39d770774146c21c1d4865ffbd330a7fa5f4d881 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 24 Mar 2018 11:56:31 +0100 Subject: [PATCH 178/745] Do not execute any java commands in the build --- .travis.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2117c828c..93b2893eb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,3 @@ -language: java - sudo: required services: From a5abe4548f1a4b91319af1476a94e9a2ab79ea02 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 24 Mar 2018 13:46:51 +0100 Subject: [PATCH 179/745] Update surefire plugin from 2.20.0 -> 2.21.0 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 9232e5968..4eb8b322c 100644 --- a/pom.xml +++ b/pom.xml @@ -88,7 +88,7 @@ org.apache.maven.plugins maven-surefire-plugin - 2.20 + 2.21.0 org.apache.maven.plugins From 997992ec51a7d9ed0bc08ae54021e33394d19eac Mon Sep 17 00:00:00 2001 From: kub Date: Fri, 23 Mar 2018 16:21:10 +0100 Subject: [PATCH 180/745] allow to figure out, whether the Point.Builder has any fields since the 
build mehtod contains validation for fields emptiness, there should be also way, how to figure out, whether the Builder contains any fields (to prevent the build method from throwing an exception) --- src/main/java/org/influxdb/dto/Point.java | 9 +++++++++ src/test/java/org/influxdb/dto/PointTest.java | 9 +++++++++ 2 files changed, 18 insertions(+) diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index acac4a82d..97122ca54 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -187,6 +187,15 @@ public Builder time(final long timeToSet, final TimeUnit precisionToSet) { return this; } + /** + * Does this builder contain any fields? + * + * @return true, if the builder contains any fields, false otherwise. + */ + public boolean hasFields() { + return !fields.isEmpty(); + } + /** * Create a new Point. * diff --git a/src/test/java/org/influxdb/dto/PointTest.java b/src/test/java/org/influxdb/dto/PointTest.java index e828fd9cc..33e11f631 100644 --- a/src/test/java/org/influxdb/dto/PointTest.java +++ b/src/test/java/org/influxdb/dto/PointTest.java @@ -330,4 +330,13 @@ public void testUnEquals() throws Exception { // THEN equals returns true assertThat(equals).isEqualTo(false); } + + @Test + public void testBuilderHasFields() { + Point.Builder pointBuilder = Point.measurement("nulltest").time(1, TimeUnit.NANOSECONDS).tag("foo", "bar"); + assertThat(pointBuilder.hasFields()).isFalse(); + + pointBuilder.addField("testfield", 256); + assertThat(pointBuilder.hasFields()).isTrue(); + } } From 06416c309005ff29c5c10fcd8e7d23d083282778 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Mon, 9 Apr 2018 13:59:00 +0200 Subject: [PATCH 181/745] Update junit from 5.1.0 -> 5.1.0 --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 4eb8b322c..4120fd6ed 100644 --- a/pom.xml +++ b/pom.xml @@ -218,13 +218,13 @@ org.junit.jupiter 
junit-jupiter-engine - 5.1.0 + 5.1.1 test org.junit.platform junit-platform-runner - 1.1.0 + 1.1.1 test @@ -242,7 +242,7 @@ org.mockito mockito-core - 2.15.0 + 2.18.0 test From 40c63d4caa8db4e235a45f63c31a9eeb9d5556e5 Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Thu, 26 Apr 2018 16:41:36 +0200 Subject: [PATCH 182/745] changing docker image for tests in main pom --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 4120fd6ed..a348e7b29 100644 --- a/pom.xml +++ b/pom.xml @@ -272,7 +272,7 @@ release - influxdb:latest + influxdb:alpine From 55e159aeb30b6336d6ba46923708ca0b57b3a31b Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Thu, 26 Apr 2018 16:44:31 +0200 Subject: [PATCH 183/745] [maven-release-plugin] prepare release influxdb-java-2.10 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index a348e7b29..e09253a25 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.10-SNAPSHOT + 2.10 influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -28,7 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git git@github.com:influxdata/influxdb-java.git - HEAD + influxdb-java-2.10 From 4a6d3ce3d059d34499d92f362587804b9b51110d Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Thu, 26 Apr 2018 16:44:39 +0200 Subject: [PATCH 184/745] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index e09253a25..6a7723db4 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.10 + 2.11-SNAPSHOT influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -28,7 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git 
git@github.com:influxdata/influxdb-java.git - influxdb-java-2.10 + HEAD From 9309b23d838fb02ff010fab5a0f5027cd61e0708 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Thu, 3 May 2018 08:05:35 +0200 Subject: [PATCH 185/745] Update the Changelog with 2.10 release inforomation --- CHANGELOG.md | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ecfbffb17..076c64c1a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,25 @@ # Changelog -## 2.10 [unreleased] +## 2.11 [unreleased] + +- Support dynamic measurement name in InfluxDBResultMapper [PR #423] (https://github.com/influxdata/influxdb-java/pull/423) + +## 2.10 [2018-04-26] + +### Fixes +- Fix IllegalAccessException on setting value to POJOs, InfluxDBResultMapper is now more thread-safe [PR #432] (https://github.com/influxdata/influxdb-java/pull/432) ### Features - Support for parameter binding in queries ("prepared statements") [PR #429](https://github.com/influxdata/influxdb-java/pull/429) +- Allow to figure out whether the Point.Builder has any field or not [PR #434] (https://github.com/influxdata/influxdb-java/pull/434) + +### Improvements + +- Performance: use chained StringBuilder calls instead of single calls [PR #426] (https://github.com/influxdata/influxdb-java/pull/426) +- Performance: Escape fields and keys more efficiently [PR #424] (https://github.com/influxdata/influxdb-java/pull/424) +- Build: Speed up travis build [PR #435] (https://github.com/influxdata/influxdb-java/pull/435) +- Test: Update junit from 5.1.0 to 5.1.1 [PR #441] (https://github.com/influxdata/influxdb-java/pull/441) ## 2.9 [2018-02-27] From e573475972850d3d5b486f673dc8558a568ee051 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Thu, 3 May 2018 08:08:49 +0200 Subject: [PATCH 186/745] fix markdown style --- CHANGELOG.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 076c64c1a..48467b140 100644 --- 
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,24 +2,24 @@ ## 2.11 [unreleased] -- Support dynamic measurement name in InfluxDBResultMapper [PR #423] (https://github.com/influxdata/influxdb-java/pull/423) +- Support dynamic measurement name in InfluxDBResultMapper [PR #423](https://github.com/influxdata/influxdb-java/pull/423) ## 2.10 [2018-04-26] ### Fixes -- Fix IllegalAccessException on setting value to POJOs, InfluxDBResultMapper is now more thread-safe [PR #432] (https://github.com/influxdata/influxdb-java/pull/432) +- Fix IllegalAccessException on setting value to POJOs, InfluxDBResultMapper is now more thread-safe [PR #432](https://github.com/influxdata/influxdb-java/pull/432) ### Features - Support for parameter binding in queries ("prepared statements") [PR #429](https://github.com/influxdata/influxdb-java/pull/429) -- Allow to figure out whether the Point.Builder has any field or not [PR #434] (https://github.com/influxdata/influxdb-java/pull/434) +- Allow to figure out whether the Point.Builder has any field or not [PR #434](https://github.com/influxdata/influxdb-java/pull/434) ### Improvements -- Performance: use chained StringBuilder calls instead of single calls [PR #426] (https://github.com/influxdata/influxdb-java/pull/426) -- Performance: Escape fields and keys more efficiently [PR #424] (https://github.com/influxdata/influxdb-java/pull/424) -- Build: Speed up travis build [PR #435] (https://github.com/influxdata/influxdb-java/pull/435) -- Test: Update junit from 5.1.0 to 5.1.1 [PR #441] (https://github.com/influxdata/influxdb-java/pull/441) +- Performance: use chained StringBuilder calls instead of single calls [PR #426](https://github.com/influxdata/influxdb-java/pull/426) +- Performance: Escape fields and keys more efficiently [PR #424](https://github.com/influxdata/influxdb-java/pull/424) +- Build: Speed up travis build [PR #435](https://github.com/influxdata/influxdb-java/pull/435) +- Test: Update junit from 5.1.0 to 5.1.1 [PR 
#441](https://github.com/influxdata/influxdb-java/pull/441) ## 2.9 [2018-02-27] From 92530623d085a9455ba912f84e44b92a0dd61a0c Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Tue, 8 May 2018 10:36:15 +0200 Subject: [PATCH 187/745] update junit from 5.1.1 -> 5.2.0 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 6a7723db4..659d934ca 100644 --- a/pom.xml +++ b/pom.xml @@ -218,13 +218,13 @@ org.junit.jupiter junit-jupiter-engine - 5.1.1 + 5.2.0 test org.junit.platform junit-platform-runner - 1.1.1 + 1.2.0 test From f5634ce5205ec24160c34ba77f42c2139b42471c Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Wed, 9 May 2018 10:18:16 +0700 Subject: [PATCH 188/745] fix build javadocs failed + fix unit test --- src/main/java/org/influxdb/InfluxDB.java | 22 ++++++++++++++++++++-- src/main/java/org/influxdb/dto/Point.java | 3 +++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index 7ff02165a..ec9b7b32a 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -299,9 +299,18 @@ public void write(final String database, final String retentionPolicy, /** * Write a set of Points to the influxdb database with the string records. * - * {@linkplain "https://github.com/influxdb/influxdb/pull/2696"} + * @see 2696 * + * @param database + * the name of the database to write + * @param retentionPolicy + * the retentionPolicy to use + * @param consistency + * the ConsistencyLevel to use + * @param precision + * the time precision to use * @param records + * the points in the correct lineprotocol. 
*/ public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final TimeUnit precision, final String records); @@ -326,9 +335,18 @@ public void write(final String database, final String retentionPolicy, /** * Write a set of Points to the influxdb database with the list of string records. * - * {@linkplain "https://github.com/influxdb/influxdb/pull/2696"} + * @see 2696 * + * @param database + * the name of the database to write + * @param retentionPolicy + * the retentionPolicy to use + * @param consistency + * the ConsistencyLevel to use + * @param precision + * the time precision to use * @param records + * the List of points in the correct lineprotocol. */ public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final TimeUnit precision, final List records); diff --git a/src/main/java/org/influxdb/dto/Point.java b/src/main/java/org/influxdb/dto/Point.java index 67f486762..fd0175c41 100644 --- a/src/main/java/org/influxdb/dto/Point.java +++ b/src/main/java/org/influxdb/dto/Point.java @@ -423,6 +423,9 @@ private void formatedTime(final StringBuilder sb) { } private StringBuilder formatedTime(final StringBuilder sb, final TimeUnit precision) { + if (this.time == null || this.precision == null) { + return sb; + } sb.append(" ").append(precision.convert(this.time, this.precision)); return sb; } From 348faa117b9790865d1744aa7b1b5992d293af5d Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 11 May 2018 14:51:21 +0700 Subject: [PATCH 189/745] set method accessibility to default --- src/main/java/org/influxdb/dto/BatchPoints.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/influxdb/dto/BatchPoints.java b/src/main/java/org/influxdb/dto/BatchPoints.java index a2f484258..9d5fc9328 100644 --- a/src/main/java/org/influxdb/dto/BatchPoints.java +++ b/src/main/java/org/influxdb/dto/BatchPoints.java @@ -211,7 +211,7 @@ public TimeUnit 
getPrecision() { /** * @param precision the time precision to set for the batch points */ - public void setPrecision(final TimeUnit precision) { + void setPrecision(final TimeUnit precision) { this.precision = precision; } From 565c535c9c7382e90eed4366abb8d0de31e5406a Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 11 May 2018 17:13:59 +0700 Subject: [PATCH 190/745] change log entry for PR #321 --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 48467b140..24e142fa6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,9 @@ ## 2.11 [unreleased] +### Features + +- Allow write precision of TimeUnit other than Nanoseconds [PR #321](https://github.com/influxdata/influxdb-java/pull/321) - Support dynamic measurement name in InfluxDBResultMapper [PR #423](https://github.com/influxdata/influxdb-java/pull/423) ## 2.10 [2018-04-26] From aa09c1fa493a1c67b97baf8f0e6b409235dfb1a9 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sun, 13 May 2018 08:53:09 +0200 Subject: [PATCH 191/745] We are at 2.10 already --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 8365fce41..b04221c9d 100644 --- a/README.md +++ b/README.md @@ -303,12 +303,12 @@ The latest version for maven dependence: org.influxdb influxdb-java - 2.9 + 2.10 ``` Or when using with gradle: ```groovy -compile 'org.influxdb:influxdb-java:2.9' +compile 'org.influxdb:influxdb-java:2.10' ``` For version change history have a look at [ChangeLog](https://github.com/influxdata/influxdb-java/blob/master/CHANGELOG.md). 
From c9ec49c934364129fd21f649f79a76c2d294f8e4 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sun, 13 May 2018 09:20:05 +0200 Subject: [PATCH 192/745] update test dependencies --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 659d934ca..13da8cd69 100644 --- a/pom.xml +++ b/pom.xml @@ -236,13 +236,13 @@ org.assertj assertj-core - 3.9.1 + 3.10.0 test org.mockito mockito-core - 2.18.0 + 2.18.3 test From 46abc4c561e0b738bf76a694cec1db3ddf2ee52f Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 22 May 2018 15:07:18 +0700 Subject: [PATCH 193/745] issues #413 : Debug mode which allows HTTP requests being sent to the database to be logged --- src/main/java/org/influxdb/InfluxDB.java | 6 +++ .../java/org/influxdb/impl/InfluxDBImpl.java | 28 ++++++++++- .../org/influxdb/InfluxDBLogLevelTest.java | 50 +++++++++++++++++++ 3 files changed, 82 insertions(+), 2 deletions(-) create mode 100644 src/test/java/org/influxdb/InfluxDBLogLevelTest.java diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index ec9b7b32a..12f30ebe3 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -26,6 +26,12 @@ */ public interface InfluxDB { + /** + * The system property key to set the http logging level across the JVM. + * @see LogLevel for available values + */ + public static final String LOG_LEVEL_PROP = "org.influxdb.InfluxDB.logLevel"; + /** Controls the level of logging of the REST layer. */ public enum LogLevel { /** No logging. 
*/ diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 076072192..c7d6ef3db 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -87,7 +87,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.username = username; this.password = password; this.loggingInterceptor = new HttpLoggingInterceptor(); - this.loggingInterceptor.setLevel(Level.NONE); + setLogLevel(); this.gzipRequestInterceptor = new GzipRequestInterceptor(); this.retrofit = new Retrofit.Builder() .baseUrl(url) @@ -105,7 +105,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.username = username; this.password = password; this.loggingInterceptor = new HttpLoggingInterceptor(); - this.loggingInterceptor.setLevel(Level.NONE); + setLogLevel(); this.gzipRequestInterceptor = new GzipRequestInterceptor(); this.retrofit = new Retrofit.Builder() .baseUrl(url) @@ -739,4 +739,28 @@ public void dropRetentionPolicy(final String rpName, final String database) { execute(this.influxDBService.postQuery(this.username, this.password, Query.encode(queryBuilder.toString()))); } + + private void setLogLevel() { + String value = System.getProperty(LOG_LEVEL_PROP); + + if (value == null) { + logLevel = LogLevel.NONE; + } else { + switch (value) { + case "BASIC": + logLevel = LogLevel.BASIC; + break; + case "HEADERS": + logLevel = LogLevel.HEADERS; + break; + case "FULL": + logLevel = LogLevel.FULL; + break; + default: + logLevel = LogLevel.NONE; + } + } + + setLogLevel(logLevel); + } } diff --git a/src/test/java/org/influxdb/InfluxDBLogLevelTest.java b/src/test/java/org/influxdb/InfluxDBLogLevelTest.java new file mode 100644 index 000000000..bbd6973aa --- /dev/null +++ b/src/test/java/org/influxdb/InfluxDBLogLevelTest.java @@ -0,0 +1,50 @@ +package org.influxdb; + +import java.util.Arrays; +import java.util.List; 
+import java.util.Optional; + +import org.influxdb.dto.Pong; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; + +/** + * Test the InfluxDBimpl log level setting from system property. + * + * @author hoan.le [at] bonitoo.io + * + */ +@RunWith(JUnitPlatform.class) +public class InfluxDBLogLevelTest { + @Test + public void testLogLevelProperties() { + + String oldLogLevel = System.getProperty(InfluxDB.LOG_LEVEL_PROP); + List logLevels = Arrays.asList(null, "NONE", "BASIC", "HEADERS", "FULL", "abc"); + logLevels.forEach(logLevel -> { + System.out.println("LogLevel = " + logLevel); + Optional.ofNullable(logLevel).ifPresent(value -> { + System.setProperty(InfluxDB.LOG_LEVEL_PROP, value); + }); + + try { + InfluxDB influxDB = TestUtils.connectToInfluxDB(); + Pong result = influxDB.ping(); + Assertions.assertNotNull(result); + influxDB.close(); + } catch (Exception e) { + Assertions.fail(e); + } + }); + + if (oldLogLevel == null) { + System.clearProperty(InfluxDB.LOG_LEVEL_PROP); + } else { + System.setProperty(InfluxDB.LOG_LEVEL_PROP, oldLogLevel); + } + + + } +} From ad15f338ded2c812b84cfd0279de56fab0d7a011 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 25 May 2018 12:39:18 +0700 Subject: [PATCH 194/745] fix reviewing comments --- pom.xml | 6 +++ .../java/org/influxdb/impl/InfluxDBImpl.java | 16 +++---- .../org/influxdb/InfluxDBLogLevelTest.java | 47 ++++++++++--------- 3 files changed, 38 insertions(+), 31 deletions(-) diff --git a/pom.xml b/pom.xml index 13da8cd69..c20f32f29 100644 --- a/pom.xml +++ b/pom.xml @@ -245,6 +245,12 @@ 2.18.3 test + + com.github.stephenc.jcip + jcip-annotations + 1.0-1 + test + com.squareup.retrofit2 retrofit diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index c7d6ef3db..4fbfa93c9 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ 
b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -87,7 +87,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.username = username; this.password = password; this.loggingInterceptor = new HttpLoggingInterceptor(); - setLogLevel(); + initLogLevel(); this.gzipRequestInterceptor = new GzipRequestInterceptor(); this.retrofit = new Retrofit.Builder() .baseUrl(url) @@ -105,7 +105,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.username = username; this.password = password; this.loggingInterceptor = new HttpLoggingInterceptor(); - setLogLevel(); + initLogLevel(); this.gzipRequestInterceptor = new GzipRequestInterceptor(); this.retrofit = new Retrofit.Builder() .baseUrl(url) @@ -740,13 +740,13 @@ public void dropRetentionPolicy(final String rpName, final String database) { Query.encode(queryBuilder.toString()))); } - private void setLogLevel() { + private void initLogLevel() { String value = System.getProperty(LOG_LEVEL_PROP); - if (value == null) { - logLevel = LogLevel.NONE; - } else { - switch (value) { + LogLevel logLevel = LogLevel.NONE; + + if (value != null) { + switch (value.toUpperCase()) { case "BASIC": logLevel = LogLevel.BASIC; break; @@ -757,10 +757,8 @@ private void setLogLevel() { logLevel = LogLevel.FULL; break; default: - logLevel = LogLevel.NONE; } } - setLogLevel(logLevel); } } diff --git a/src/test/java/org/influxdb/InfluxDBLogLevelTest.java b/src/test/java/org/influxdb/InfluxDBLogLevelTest.java index bbd6973aa..5003b3f48 100644 --- a/src/test/java/org/influxdb/InfluxDBLogLevelTest.java +++ b/src/test/java/org/influxdb/InfluxDBLogLevelTest.java @@ -10,6 +10,8 @@ import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; +import net.jcip.annotations.NotThreadSafe; + /** * Test the InfluxDBimpl log level setting from system property. 
* @@ -17,34 +19,35 @@ * */ @RunWith(JUnitPlatform.class) +@NotThreadSafe public class InfluxDBLogLevelTest { @Test public void testLogLevelProperties() { - String oldLogLevel = System.getProperty(InfluxDB.LOG_LEVEL_PROP); - List logLevels = Arrays.asList(null, "NONE", "BASIC", "HEADERS", "FULL", "abc"); - logLevels.forEach(logLevel -> { - System.out.println("LogLevel = " + logLevel); - Optional.ofNullable(logLevel).ifPresent(value -> { - System.setProperty(InfluxDB.LOG_LEVEL_PROP, value); + try { + List logLevels = Arrays.asList(null, "NONE", "BASIC", "HEADERS", "FULL", "abc"); + logLevels.forEach(logLevel -> { + System.out.println("LogLevel = " + logLevel); + Optional.ofNullable(logLevel).ifPresent(value -> { + System.setProperty(InfluxDB.LOG_LEVEL_PROP, value); + }); + + try { + InfluxDB influxDB = TestUtils.connectToInfluxDB(); + Pong result = influxDB.ping(); + Assertions.assertNotNull(result); + influxDB.close(); + } catch (Exception e) { + Assertions.fail(e); + } }); - - try { - InfluxDB influxDB = TestUtils.connectToInfluxDB(); - Pong result = influxDB.ping(); - Assertions.assertNotNull(result); - influxDB.close(); - } catch (Exception e) { - Assertions.fail(e); + } finally { + if (oldLogLevel == null) { + System.clearProperty(InfluxDB.LOG_LEVEL_PROP); + } else { + System.setProperty(InfluxDB.LOG_LEVEL_PROP, oldLogLevel); } - }); - - if (oldLogLevel == null) { - System.clearProperty(InfluxDB.LOG_LEVEL_PROP); - } else { - System.setProperty(InfluxDB.LOG_LEVEL_PROP, oldLogLevel); } - - + } } From 75b6ada4b57c8a9a16bdbc9b7f98baa0d18bd343 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 25 May 2018 14:51:38 +0700 Subject: [PATCH 195/745] fix reviewing comments change to use Enum.valueOf(,) as reviewing comment --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 
4fbfa93c9..e7871f261 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -742,21 +742,12 @@ public void dropRetentionPolicy(final String rpName, final String database) { private void initLogLevel() { String value = System.getProperty(LOG_LEVEL_PROP); - LogLevel logLevel = LogLevel.NONE; if (value != null) { - switch (value.toUpperCase()) { - case "BASIC": - logLevel = LogLevel.BASIC; - break; - case "HEADERS": - logLevel = LogLevel.HEADERS; - break; - case "FULL": - logLevel = LogLevel.FULL; - break; - default: + try { + logLevel = LogLevel.valueOf(value.toUpperCase()); + } catch (IllegalArgumentException e) { } } setLogLevel(logLevel); From 7534a7609ef83f513009d3ee08ca34d4837d888b Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Thu, 31 May 2018 22:14:36 +0700 Subject: [PATCH 196/745] fix reviewing comments + value change of debug mode logLevel is not supported + LOG_LEVEL_PROPERTY instead of LOG_LEVEL_PROP --- pom.xml | 6 --- src/main/java/org/influxdb/InfluxDB.java | 20 ++++++- .../java/org/influxdb/impl/InfluxDBImpl.java | 21 +++----- .../org/influxdb/InfluxDBLogLevelTest.java | 53 ------------------- src/test/java/org/influxdb/LogLevelTest.java | 33 ++++++++++++ 5 files changed, 59 insertions(+), 74 deletions(-) delete mode 100644 src/test/java/org/influxdb/InfluxDBLogLevelTest.java create mode 100644 src/test/java/org/influxdb/LogLevelTest.java diff --git a/pom.xml b/pom.xml index c20f32f29..13da8cd69 100644 --- a/pom.xml +++ b/pom.xml @@ -245,12 +245,6 @@ 2.18.3 test - - com.github.stephenc.jcip - jcip-annotations - 1.0-1 - test - com.squareup.retrofit2 retrofit diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index 12f30ebe3..749ede060 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -30,7 +30,7 @@ public interface InfluxDB { * The system property key to set the http logging level 
across the JVM. * @see LogLevel for available values */ - public static final String LOG_LEVEL_PROP = "org.influxdb.InfluxDB.logLevel"; + public static final String LOG_LEVEL_PROPERTY = "org.influxdb.InfluxDB.logLevel"; /** Controls the level of logging of the REST layer. */ public enum LogLevel { @@ -46,6 +46,24 @@ public enum LogLevel { * Note: This requires that the entire request and response body be buffered in memory! */ FULL; + /** + * Parses the string argument as a LogLevel constant. + * @param value a {@code String} containing the {@code LogLevel constant} + * representation to be parsed + * @return the LogLevel constant representation of the param + * or {@code NONE} for null or any invalid String representation. + */ + public static LogLevel parseLogLevel(final String value) { + LogLevel logLevel = NONE; + if (value != null) { + try { + logLevel = valueOf(value.toUpperCase()); + } catch (IllegalArgumentException e) { + } + } + + return logLevel; + } } /** diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index e7871f261..8354c478d 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -59,6 +59,7 @@ public class InfluxDBImpl implements InfluxDB { static final okhttp3.MediaType MEDIA_TYPE_STRING = MediaType.parse("text/plain"); private static final String SHOW_DATABASE_COMMAND_ENCODED = Query.encode("SHOW DATABASES"); + private static final String DEBUG_MODE_LOG_LEVEL = System.getProperty(LOG_LEVEL_PROPERTY); private final InetAddress hostAddress; private final String username; @@ -86,8 +87,10 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.hostAddress = parseHostAddress(url); this.username = username; this.password = password; + this.loggingInterceptor = new HttpLoggingInterceptor(); - initLogLevel(); + setLogLevel(LogLevel.parseLogLevel(DEBUG_MODE_LOG_LEVEL)); + 
this.gzipRequestInterceptor = new GzipRequestInterceptor(); this.retrofit = new Retrofit.Builder() .baseUrl(url) @@ -104,8 +107,10 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.hostAddress = parseHostAddress(url); this.username = username; this.password = password; + this.loggingInterceptor = new HttpLoggingInterceptor(); - initLogLevel(); + setLogLevel(LogLevel.parseLogLevel(DEBUG_MODE_LOG_LEVEL)); + this.gzipRequestInterceptor = new GzipRequestInterceptor(); this.retrofit = new Retrofit.Builder() .baseUrl(url) @@ -740,16 +745,4 @@ public void dropRetentionPolicy(final String rpName, final String database) { Query.encode(queryBuilder.toString()))); } - private void initLogLevel() { - String value = System.getProperty(LOG_LEVEL_PROP); - LogLevel logLevel = LogLevel.NONE; - - if (value != null) { - try { - logLevel = LogLevel.valueOf(value.toUpperCase()); - } catch (IllegalArgumentException e) { - } - } - setLogLevel(logLevel); - } } diff --git a/src/test/java/org/influxdb/InfluxDBLogLevelTest.java b/src/test/java/org/influxdb/InfluxDBLogLevelTest.java deleted file mode 100644 index 5003b3f48..000000000 --- a/src/test/java/org/influxdb/InfluxDBLogLevelTest.java +++ /dev/null @@ -1,53 +0,0 @@ -package org.influxdb; - -import java.util.Arrays; -import java.util.List; -import java.util.Optional; - -import org.influxdb.dto.Pong; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.platform.runner.JUnitPlatform; -import org.junit.runner.RunWith; - -import net.jcip.annotations.NotThreadSafe; - -/** - * Test the InfluxDBimpl log level setting from system property. 
- * - * @author hoan.le [at] bonitoo.io - * - */ -@RunWith(JUnitPlatform.class) -@NotThreadSafe -public class InfluxDBLogLevelTest { - @Test - public void testLogLevelProperties() { - String oldLogLevel = System.getProperty(InfluxDB.LOG_LEVEL_PROP); - try { - List logLevels = Arrays.asList(null, "NONE", "BASIC", "HEADERS", "FULL", "abc"); - logLevels.forEach(logLevel -> { - System.out.println("LogLevel = " + logLevel); - Optional.ofNullable(logLevel).ifPresent(value -> { - System.setProperty(InfluxDB.LOG_LEVEL_PROP, value); - }); - - try { - InfluxDB influxDB = TestUtils.connectToInfluxDB(); - Pong result = influxDB.ping(); - Assertions.assertNotNull(result); - influxDB.close(); - } catch (Exception e) { - Assertions.fail(e); - } - }); - } finally { - if (oldLogLevel == null) { - System.clearProperty(InfluxDB.LOG_LEVEL_PROP); - } else { - System.setProperty(InfluxDB.LOG_LEVEL_PROP, oldLogLevel); - } - } - - } -} diff --git a/src/test/java/org/influxdb/LogLevelTest.java b/src/test/java/org/influxdb/LogLevelTest.java new file mode 100644 index 000000000..59205d579 --- /dev/null +++ b/src/test/java/org/influxdb/LogLevelTest.java @@ -0,0 +1,33 @@ +package org.influxdb; + +import java.util.HashMap; +import java.util.Map; + +import org.influxdb.InfluxDB.LogLevel; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; + +/** + * Test the InfluxDBimpl log level setting from system property. 
+ * + * @author hoan.le [at] bonitoo.io + * + */ +@RunWith(JUnitPlatform.class) +public class LogLevelTest { + @Test + public void testParseLogLevel() { + Map logLevelMap = new HashMap<>(); + logLevelMap.put(null, LogLevel.NONE); + logLevelMap.put("NONE", LogLevel.NONE); + logLevelMap.put("BASIC", LogLevel.BASIC); + logLevelMap.put("HEADERS", LogLevel.HEADERS); + logLevelMap.put("FULL", LogLevel.FULL); + logLevelMap.put("abc", LogLevel.NONE); + logLevelMap.forEach((value, logLevel) -> { + Assertions.assertEquals(LogLevel.parseLogLevel(value), logLevel); + }); + } +} From 562442aa483dc857baf9dcc8496094fd652ae719 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 1 Jun 2018 11:11:35 +0700 Subject: [PATCH 197/745] fix compilation failed --- src/test/java/org/influxdb/TestUtils.java | 39 ++++++++++++++++++++--- 1 file changed, 34 insertions(+), 5 deletions(-) diff --git a/src/test/java/org/influxdb/TestUtils.java b/src/test/java/org/influxdb/TestUtils.java index 8eb4224f7..78f81c095 100644 --- a/src/test/java/org/influxdb/TestUtils.java +++ b/src/test/java/org/influxdb/TestUtils.java @@ -4,11 +4,13 @@ import org.influxdb.dto.Pong; import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; import java.util.Map; public class TestUtils { - public static String getInfluxURL() { + public static String getInfluxURL() { String ip = "http://127.0.0.1:8086"; Map getenv = System.getenv(); @@ -18,7 +20,36 @@ public static String getInfluxURL() { return ip; } - + public static String getInfluxHost() { + URL url; + try { + url = new URL(getInfluxURL()); + } catch (MalformedURLException e) { + return null; + } + return url.getHost(); + } + + public static String getInfluxPORT(boolean apiPort) { + if(apiPort) { + URL url; + try { + url = new URL(getInfluxURL()); + } catch (MalformedURLException e) { + return null; + } + return Integer.toString(url.getPort()); + } + else { + Map getenv = System.getenv(); + String port = "8096"; + if 
(getenv.containsKey("INFLUXDB_PORT_COLLECTD")) { + port = getenv.get("INFLUXDB_PORT_COLLECTD"); + } + return port; + } + } + public static String getRandomMeasurement() { return "measurement_" + System.nanoTime(); } @@ -42,9 +73,7 @@ public static InfluxDB connectToInfluxDB( final OkHttpClient.Builder client) thr } else { clientToUse = client; } - InfluxDB influxDB = InfluxDBFactory.connect( - "http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), - "admin", "admin", clientToUse); + InfluxDB influxDB = InfluxDBFactory.connect(getInfluxURL(), "admin", "admin", clientToUse); boolean influxDBstarted = false; do { Pong response; From ef9589e8949e7dd7632dda195ff7dc29e25b8809 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 1 Jun 2018 11:34:11 +0700 Subject: [PATCH 198/745] remove UDP API tests which have been moved to UDPInfluxDBTest --- src/test/java/org/influxdb/InfluxDBTest.java | 90 -------------------- 1 file changed, 90 deletions(-) diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index b0e1687e2..48aef944d 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -220,42 +220,6 @@ public void testWrite() { this.influxDB.deleteDatabase(dbName); } - /** - * Test the implementation of {@link InfluxDB#write(int, Point)}'s sync support. 
- */ - @Test - public void testSyncWritePointThroughUDP() throws InterruptedException { - this.influxDB.disableBatch(); - String measurement = TestUtils.getRandomMeasurement(); - Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); - this.influxDB.write(UDP_PORT, point); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - } - - /** - * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. - */ - @Test - public void testAsyncWritePointThroughUDP() throws InterruptedException { - this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); - try{ - Assertions.assertTrue(this.influxDB.isBatchEnabled()); - String measurement = TestUtils.getRandomMeasurement(); - Point point = Point.measurement(measurement).tag("atag", "test").addField("used", 80L).addField("free", 1L).build(); - this.influxDB.write(UDP_PORT, point); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - }finally{ - this.influxDB.disableBatch(); - } - } - - /** * Test the implementation of {@link InfluxDB#write(int, Point)}'s async support. */ @@ -307,60 +271,6 @@ public void testWriteStringDataSimple() { this.influxDB.deleteDatabase(dbName); } - /** - * Test writing to the database using string protocol through UDP. - */ - @Test - public void testWriteStringDataThroughUDP() throws InterruptedException { - String measurement = TestUtils.getRandomMeasurement(); - this.influxDB.write(UDP_PORT, measurement + ",atag=test idle=90,usertime=9,system=1"); - //write with UDP may be executed on server after query with HTTP. 
so sleep 2s to handle this case - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - Assertions.assertFalse(result.getResults().get(0).getSeries().get(0).getTags().isEmpty()); - } - - /** - * Test writing multiple records to the database using string protocol through UDP. - */ - @Test - public void testWriteMultipleStringDataThroughUDP() throws InterruptedException { - String measurement = TestUtils.getRandomMeasurement(); - this.influxDB.write(UDP_PORT, measurement + ",atag=test1 idle=100,usertime=10,system=1\n" + - measurement + ",atag=test2 idle=200,usertime=20,system=2\n" + - measurement + ",atag=test3 idle=300,usertime=30,system=3"); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - - Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); - Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); - Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); - Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); - } - - /** - * Test writing multiple separate records to the database using string protocol through UDP. 
- */ - @Test - public void testWriteMultipleStringDataLinesThroughUDP() throws InterruptedException { - String measurement = TestUtils.getRandomMeasurement(); - this.influxDB.write(UDP_PORT, Arrays.asList( - measurement + ",atag=test1 idle=100,usertime=10,system=1", - measurement + ",atag=test2 idle=200,usertime=20,system=2", - measurement + ",atag=test3 idle=300,usertime=30,system=3" - )); - Thread.sleep(2000); - Query query = new Query("SELECT * FROM " + measurement + " GROUP BY *", UDP_DATABASE); - QueryResult result = this.influxDB.query(query); - - Assertions.assertEquals(3, result.getResults().get(0).getSeries().size()); - Assertions.assertEquals("test1", result.getResults().get(0).getSeries().get(0).getTags().get("atag")); - Assertions.assertEquals("test2", result.getResults().get(0).getSeries().get(1).getTags().get("atag")); - Assertions.assertEquals("test3", result.getResults().get(0).getSeries().get(2).getTags().get("atag")); - } - /** * When batch of points' size is over UDP limit, the expected exception * is java.lang.RuntimeException: java.net.SocketException: From fd8f830433ccf2a9aef82c9b2a0371dd97ac68f3 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 1 Jun 2018 15:22:59 +0700 Subject: [PATCH 199/745] code coverage should be generated from running all tests --- compile-and-test.sh | 40 +++++++++++++++++++++------------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/compile-and-test.sh b/compile-and-test.sh index 31171bf52..5c0117a3e 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -45,8 +45,8 @@ function run_test { INFLUXDB_API_URL=http://influxdb:8086 if [ "$USE_PROXY" == "nginx" ] ; then - echo Test with Nginx as proxy - INFLUXDB_API_URL=http://nginx:8080/influx-api/ + echo Test with Nginx as proxy + INFLUXDB_API_URL=http://nginx:8080/influx-api/ fi @@ -62,21 +62,22 @@ function run_test { --volume ${BUILD_HOME}/influxdb.conf:/etc/influxdb/influxdb.conf \ influxdb:${INFLUXDB_VERSION}-alpine + NGINX_LINK= + 
SKIP_TESTS= if [ "$USE_PROXY" == "nginx" ] ; then - echo Starting Nginx - docker kill nginx || true - docker rm nginx || true - echo ----- STARTING NGINX CONTAINER ----- - docker run \ - --detach \ - --name nginx \ - --publish 8888:8080 \ - --volume ${BUILD_HOME}/src/test/nginx/nginx.conf:/etc/nginx/conf.d/default.conf:ro \ - --link influxdb:influxdb \ - nginx nginx '-g' 'daemon off;' - - NGINX_LINK=--link=nginx - SKIP_TESTS=-DsomeModule.test.excludes="**/*UDPInfluxDBTest*" + echo Starting Nginx + docker kill nginx || true + docker rm nginx || true + echo ----- STARTING NGINX CONTAINER ----- + docker run \ + --detach \ + --name nginx \ + --publish 8888:8080 \ + --volume ${BUILD_HOME}/src/test/nginx/nginx.conf:/etc/nginx/conf.d/default.conf:ro \ + --link influxdb:influxdb \ + nginx nginx '-g' 'daemon off;' + NGINX_LINK=--link=nginx + SKIP_TESTS=-DsomeModule.test.excludes="**/*UDPInfluxDBTest*" fi docker run -it --rm \ @@ -88,9 +89,10 @@ function run_test { maven:${MAVEN_JAVA_VERSION} mvn clean install $SKIP_TESTS docker kill influxdb || true - docker kill nginx || true - docker rm -f nginx || true + if [ "$USE_PROXY" == "nginx" ] ; then + docker kill nginx || true + fi } -run_test run_test nginx +run_test From a7729c201fdc9ddaef7f3cf56f13ed721d871c09 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Mon, 4 Jun 2018 10:11:33 +0700 Subject: [PATCH 200/745] fix Stenfan's review at https://github.com/influxdata/influxdb-java/pull/450#issuecomment-393854536 + source code documentation of changelog entry --- CHANGELOG.md | 1 + src/main/java/org/influxdb/impl/InfluxDBImpl.java | 13 ++++++++++--- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 24e142fa6..f0abb4668 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ - Allow write precision of TimeUnit other than Nanoseconds [PR #321](https://github.com/influxdata/influxdb-java/pull/321) - Support dynamic measurement name in InfluxDBResultMapper [PR 
#423](https://github.com/influxdata/influxdb-java/pull/423) +- Debug mode which allows HTTP requests being sent to the database to be logged [PR #450](https://github.com/influxdata/influxdb-java/pull/450) ## 2.10 [2018-04-26] diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 8354c478d..d81ca517c 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -59,7 +59,14 @@ public class InfluxDBImpl implements InfluxDB { static final okhttp3.MediaType MEDIA_TYPE_STRING = MediaType.parse("text/plain"); private static final String SHOW_DATABASE_COMMAND_ENCODED = Query.encode("SHOW DATABASES"); - private static final String DEBUG_MODE_LOG_LEVEL = System.getProperty(LOG_LEVEL_PROPERTY); + + /** + * This static constant holds the http logging log level expected in DEBUG mode + * It is set by System property {@code org.influxdb.InfluxDB.logLevel}. + * + * @see org.influxdb.impl.LOG_LEVEL_PROPERTY + */ + private static final LogLevel LOG_LEVEL = LogLevel.parseLogLevel(System.getProperty(LOG_LEVEL_PROPERTY)); private final InetAddress hostAddress; private final String username; @@ -89,7 +96,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.password = password; this.loggingInterceptor = new HttpLoggingInterceptor(); - setLogLevel(LogLevel.parseLogLevel(DEBUG_MODE_LOG_LEVEL)); + setLogLevel(LOG_LEVEL); this.gzipRequestInterceptor = new GzipRequestInterceptor(); this.retrofit = new Retrofit.Builder() @@ -109,7 +116,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.password = password; this.loggingInterceptor = new HttpLoggingInterceptor(); - setLogLevel(LogLevel.parseLogLevel(DEBUG_MODE_LOG_LEVEL)); + setLogLevel(LOG_LEVEL); this.gzipRequestInterceptor = new GzipRequestInterceptor(); this.retrofit = new Retrofit.Builder() From 232be1b0f58fbb5a98f92348a1ea9eca30609103 
Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 5 Jun 2018 14:57:59 +0700 Subject: [PATCH 201/745] improve unit test --- compile-and-test.sh | 126 ++++++------------ pom.xml | 5 - .../org/influxdb/InfluxDBFactoryTest.java | 7 +- .../java/org/influxdb/InfluxDBProxyTest.java | 77 +++++++++++ src/test/java/org/influxdb/InfluxDBTest.java | 6 +- .../java/org/influxdb/PerformanceTests.java | 2 +- src/test/java/org/influxdb/TestUtils.java | 103 +++++++------- .../java/org/influxdb/UDPInfluxDBTest.java | 2 +- .../influxdb/impl/ChunkingExceptionTest.java | 28 ++-- src/test/nginx/nginx.conf | 75 ++++------- 10 files changed, 217 insertions(+), 214 deletions(-) create mode 100644 src/test/java/org/influxdb/InfluxDBProxyTest.java diff --git a/compile-and-test.sh b/compile-and-test.sh index 5c0117a3e..2008aa2c8 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -2,20 +2,6 @@ # # script to start influxdb and compile influxdb-java with all tests. # -# Note for Windows users: -# In case your docker still uses VirtualBox as a VM, you will probably have to -# inform the containers about the location of your repository. -# Please not that Docker for windows, enables you to mount everything -# from your Users (C:\Users in most cases) directory, so to keep it easy, -# it's better to keep your repository somewhere there. -# If you will decide to put your sources somewhere else, please visit your -# VirtualBox settings and check out the "Shared folder configuration". -# This script uses environment variable BUILD_HOME which should point to this -# project directory (i.e. //c/Users/MyWindowsUserName/Projects/influxdb-java) -# -# Of course you still need bash to launch this script. But this should be no -# problem either to install it (this script was tested with GitExtensions package). 
- set -e DEFAULT_INFLUXDB_VERSION="1.5" @@ -24,75 +10,45 @@ DEFAULT_MAVEN_JAVA_VERSION="3-jdk-10-slim" INFLUXDB_VERSION="${INFLUXDB_VERSION:-$DEFAULT_INFLUXDB_VERSION}" MAVEN_JAVA_VERSION="${MAVEN_JAVA_VERSION:-$DEFAULT_MAVEN_JAVA_VERSION}" -WORKDIR=/usr/src/mymaven - -if [ -z "$BUILD_HOME" ] ; then - BUILD_HOME=$PWD - if [ -x /c/Windows/System32/ ] ; then - BUILD_HOME=/$PWD - fi -fi - -if [ -x /c/Windows/System32/ ] ; then - echo "Detected Windows as a host system" - WORKDIR=//usr/src/mymaven -fi - -echo Using build home: $BUILD_HOME - -function run_test { - USE_PROXY=$1 - - INFLUXDB_API_URL=http://influxdb:8086 - if [ "$USE_PROXY" == "nginx" ] ; then - echo Test with Nginx as proxy - INFLUXDB_API_URL=http://nginx:8080/influx-api/ - fi - - - echo "Run tests with maven:${MAVEN_JAVA_VERSION} on onfluxdb-${INFLUXDB_VERSION}" - docker kill influxdb || true - docker rm influxdb || true - docker pull influxdb:${version}-alpine || true - docker run \ +echo "Run tests with maven:${MAVEN_JAVA_VERSION} on onfluxdb-${INFLUXDB_VERSION}" +docker kill influxdb || true +docker rm influxdb || true +docker pull influxdb:${version}-alpine || true +docker run \ + --detach \ + --name influxdb \ + --publish 8086:8086 \ + --publish 8089:8089/udp \ + --volume ${PWD}/influxdb.conf:/etc/influxdb/influxdb.conf \ + influxdb:${INFLUXDB_VERSION}-alpine + +echo Starting Nginx +docker kill nginx || true +docker rm nginx || true + +echo ----- STARTING NGINX CONTAINER ----- +docker run \ --detach \ - --name influxdb \ - --publish 8086:8086 \ - --publish 8089:8089/udp \ - --volume ${BUILD_HOME}/influxdb.conf:/etc/influxdb/influxdb.conf \ - influxdb:${INFLUXDB_VERSION}-alpine - - NGINX_LINK= - SKIP_TESTS= - if [ "$USE_PROXY" == "nginx" ] ; then - echo Starting Nginx - docker kill nginx || true - docker rm nginx || true - echo ----- STARTING NGINX CONTAINER ----- - docker run \ - --detach \ - --name nginx \ - --publish 8888:8080 \ - --volume 
${BUILD_HOME}/src/test/nginx/nginx.conf:/etc/nginx/conf.d/default.conf:ro \ - --link influxdb:influxdb \ - nginx nginx '-g' 'daemon off;' - NGINX_LINK=--link=nginx - SKIP_TESTS=-DsomeModule.test.excludes="**/*UDPInfluxDBTest*" - fi - - docker run -it --rm \ - --volume $BUILD_HOME:/usr/src/mymaven \ - --volume $BUILD_HOME/.m2:/root/.m2 \ - --workdir $WORKDIR \ - --link=influxdb $NGINX_LINK \ - --env INFLUXDB_API_URL=$INFLUXDB_API_URL \ - maven:${MAVEN_JAVA_VERSION} mvn clean install $SKIP_TESTS - - docker kill influxdb || true - if [ "$USE_PROXY" == "nginx" ] ; then - docker kill nginx || true - fi -} - -run_test nginx -run_test + --name nginx \ + --publish 8080:8080 \ + --publish 8080:8080/udp \ + --volume ${PWD}/src/test/nginx/nginx.conf:/etc/nginx/nginx.conf:ro \ + --link influxdb:influxdb \ + nginx:stable nginx '-g' 'daemon off;' + +PROXY_API_URL=http://nginx:8080/influx-api/ +PROXY_UDP_PORT=8080 + +docker run -it --rm \ + --volume $PWD:/usr/src/mymaven \ + --volume $PWD/.m2:/root/.m2 \ + --workdir /usr/src/mymaven \ + --link=influxdb \ + --link=nginx \ + --env INFLUXDB_IP=influxdb \ + --env PROXY_API_URL=$PROXY_API_URL \ + --env PROXY_UDP_PORT=$PROXY_UDP_PORT \ + maven:${MAVEN_JAVA_VERSION} mvn clean install + +docker kill influxdb || true +docker kill nginx || true diff --git a/pom.xml b/pom.xml index e648ffd8b..13da8cd69 100644 --- a/pom.xml +++ b/pom.xml @@ -89,11 +89,6 @@ org.apache.maven.plugins maven-surefire-plugin 2.21.0 - - - ${someModule.test.excludes} - - org.apache.maven.plugins diff --git a/src/test/java/org/influxdb/InfluxDBFactoryTest.java b/src/test/java/org/influxdb/InfluxDBFactoryTest.java index 0ff1a3127..99291b522 100644 --- a/src/test/java/org/influxdb/InfluxDBFactoryTest.java +++ b/src/test/java/org/influxdb/InfluxDBFactoryTest.java @@ -1,12 +1,13 @@ package org.influxdb; -import okhttp3.OkHttpClient; import org.influxdb.dto.Pong; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import 
org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; +import okhttp3.OkHttpClient; + /** * Test the InfluxDB Factory API. * @@ -21,7 +22,7 @@ public class InfluxDBFactoryTest { */ @Test public void testCreateInfluxDBInstanceWithoutUserNameAndPassword() { - InfluxDB influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL()); + InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true)); verifyInfluxDBInstance(influxDB); } @@ -37,7 +38,7 @@ private void verifyInfluxDBInstance(InfluxDB influxDB) { */ @Test public void testCreateInfluxDBInstanceWithClientAndWithoutUserNameAndPassword() { - InfluxDB influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), new OkHttpClient.Builder()); + InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), new OkHttpClient.Builder()); verifyInfluxDBInstance(influxDB); } diff --git a/src/test/java/org/influxdb/InfluxDBProxyTest.java b/src/test/java/org/influxdb/InfluxDBProxyTest.java new file mode 100644 index 000000000..d60c0025f --- /dev/null +++ b/src/test/java/org/influxdb/InfluxDBProxyTest.java @@ -0,0 +1,77 @@ +package org.influxdb; + +import java.io.IOException; +import java.util.concurrent.TimeUnit; + +import org.influxdb.dto.Point; +import org.influxdb.dto.Query; +import org.influxdb.dto.QueryResult; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; +/** + * Test the InfluxDB API. 
+ * + * @author hoan.le [at] bonitoo.io + * + */ +@RunWith(JUnitPlatform.class) +public class InfluxDBProxyTest { + private InfluxDB influxDB; + private String db = "udp"; + + @BeforeEach + public void setUp() throws InterruptedException, IOException { + this.influxDB = TestUtils.connectToInfluxDB(TestUtils.getProxyApiUrl()); + this.influxDB.createDatabase(db); + influxDB.setDatabase(db); + } + + /** + * delete database after all tests end. + */ + @AfterEach + public void cleanup(){ + this.influxDB.deleteDatabase(db); + } + + @Test + public void testWriteSomePointThroughTcpProxy() { + for(int i = 0; i < 20; i++) { + Point point = Point.measurement("weather") + .time(i,TimeUnit.HOURS) + .addField("temperature", (double) i) + .addField("humidity", (double) (i) * 1.1) + .addField("uv_index", "moderate").build(); + influxDB.write(point); + } + + QueryResult result = influxDB.query(new Query("select * from weather", db)); + //check points written already to DB + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + + } + + @Test + public void testWriteSomePointThroughUdpProxy() throws InterruptedException { + int proxyUdpPort = Integer.parseInt(TestUtils.getProxyUdpPort()); + for(int i = 0; i < 20; i++) { + Point point = Point.measurement("weather") + .time(i,TimeUnit.HOURS) + .addField("temperature", (double) i) + .addField("humidity", (double) (i) * 1.1) + .addField("uv_index", "moderate").build(); + influxDB.write(proxyUdpPort, point); + } + + Thread.sleep(2000); + QueryResult result = influxDB.query(new Query("select * from weather", db)); + //check points written already to DB + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + + } + +} diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 48aef944d..c80ea0cd1 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ 
-658,7 +658,7 @@ public void testBatchEnabledTwice() { */ @Test public void testCloseInfluxDBClient() { - InfluxDB influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); + InfluxDB influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); influxDB.enableBatch(1, 1, TimeUnit.SECONDS); Assertions.assertTrue(influxDB.isBatchEnabled()); influxDB.close(); @@ -670,7 +670,7 @@ public void testCloseInfluxDBClient() { */ @Test public void testWriteEnableGzip() { - InfluxDB influxDBForTestGzip = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); + InfluxDB influxDBForTestGzip = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); String dbName = "write_unittest_" + System.currentTimeMillis(); try { influxDBForTestGzip.setLogLevel(LogLevel.NONE); @@ -702,7 +702,7 @@ public void testWriteEnableGzip() { */ @Test public void testWriteEnableGzipAndDisableGzip() { - InfluxDB influxDBForTestGzip = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); + InfluxDB influxDBForTestGzip = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); try { //test default: gzip is disable Assertions.assertFalse(influxDBForTestGzip.isGzipEnabled()); diff --git a/src/test/java/org/influxdb/PerformanceTests.java b/src/test/java/org/influxdb/PerformanceTests.java index 23ac56ea2..65055c507 100644 --- a/src/test/java/org/influxdb/PerformanceTests.java +++ b/src/test/java/org/influxdb/PerformanceTests.java @@ -33,7 +33,7 @@ public class PerformanceTests { @BeforeEach public void setUp() { - this.influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "root", "root"); + this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "root", "root"); this.influxDB.setLogLevel(LogLevel.NONE); 
this.influxDB.createDatabase(UDP_DATABASE); } diff --git a/src/test/java/org/influxdb/TestUtils.java b/src/test/java/org/influxdb/TestUtils.java index 78f81c095..9d46ec09b 100644 --- a/src/test/java/org/influxdb/TestUtils.java +++ b/src/test/java/org/influxdb/TestUtils.java @@ -4,76 +4,75 @@ import org.influxdb.dto.Pong; import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; import java.util.Map; public class TestUtils { - public static String getInfluxURL() { - String ip = "http://127.0.0.1:8086"; + private static String getEnv(String name, String defaultValue) { + Map getenv = System.getenv(); - Map getenv = System.getenv(); - if (getenv.containsKey("INFLUXDB_API_URL")) { - ip = getenv.get("INFLUXDB_API_URL"); - } - return ip; - } - - public static String getInfluxHost() { - URL url; - try { - url = new URL(getInfluxURL()); - } catch (MalformedURLException e) { - return null; - } - return url.getHost(); + if (getenv.containsKey(name)) { + return getenv.get(name); + } else { + return defaultValue; } + } + + public static String getInfluxIP() { + return getEnv("INFLUXDB_IP", "127.0.0.1"); + } + + public static String getRandomMeasurement() { + return "measurement_" + System.nanoTime(); + } + + public static String getInfluxPORT(boolean apiPort) { + if(apiPort) { + return getEnv("INFLUXDB_PORT_API", "8086"); + } + else { + return getEnv("INFLUXDB_PORT_COLLECTD", "8096"); + } + } - public static String getInfluxPORT(boolean apiPort) { - if(apiPort) { - URL url; - try { - url = new URL(getInfluxURL()); - } catch (MalformedURLException e) { - return null; - } - return Integer.toString(url.getPort()); - } - else { - Map getenv = System.getenv(); - String port = "8096"; - if (getenv.containsKey("INFLUXDB_PORT_COLLECTD")) { - port = getenv.get("INFLUXDB_PORT_COLLECTD"); - } - return port; - } - } + public static String getProxyApiUrl() { + return getEnv("PROXY_API_URL", "http://127.0.0.1:8086/"); + } - public static String 
getRandomMeasurement() { - return "measurement_" + System.nanoTime(); - } - - public static String defaultRetentionPolicy(String version) { - if (version.startsWith("0.")) { - return "default"; - } else { - return "autogen"; - } - } + public static String getProxyUdpPort() { + return getEnv("PROXY_UDP_PORT", "8089"); + } + + public static String defaultRetentionPolicy(String version) { + if (version.startsWith("0.") ) { + return "default"; + } else { + return "autogen"; + } + } public static InfluxDB connectToInfluxDB() throws InterruptedException, IOException { - return connectToInfluxDB(null); + return connectToInfluxDB(null, null); } - public static InfluxDB connectToInfluxDB( final OkHttpClient.Builder client) throws InterruptedException, IOException { + public static InfluxDB connectToInfluxDB(String apiUrl) throws InterruptedException, IOException { + return connectToInfluxDB(new OkHttpClient.Builder(), apiUrl); + } + + public static InfluxDB connectToInfluxDB(final OkHttpClient.Builder client, String apiUrl) throws InterruptedException, IOException { OkHttpClient.Builder clientToUse; if (client == null) { clientToUse = new OkHttpClient.Builder(); } else { clientToUse = client; } - InfluxDB influxDB = InfluxDBFactory.connect(getInfluxURL(), "admin", "admin", clientToUse); + String apiUrlToUse; + if (apiUrl == null) { + apiUrlToUse = "http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true); + } else { + apiUrlToUse = apiUrl; + } + InfluxDB influxDB = InfluxDBFactory.connect(apiUrlToUse, "admin", "admin", clientToUse); boolean influxDBstarted = false; do { Pong response; diff --git a/src/test/java/org/influxdb/UDPInfluxDBTest.java b/src/test/java/org/influxdb/UDPInfluxDBTest.java index a0ff74c62..c63fcae86 100644 --- a/src/test/java/org/influxdb/UDPInfluxDBTest.java +++ b/src/test/java/org/influxdb/UDPInfluxDBTest.java @@ -38,7 +38,7 @@ public class UDPInfluxDBTest { */ @BeforeEach public void setUp() throws InterruptedException, IOException { 
- this.influxDB = InfluxDBFactory.connect(TestUtils.getInfluxURL(), "admin", "admin"); + this.influxDB = InfluxDBFactory.connect("http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true), "admin", "admin"); boolean influxDBstarted = false; do { Pong response; diff --git a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java index b7e61e519..c81189b92 100644 --- a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java +++ b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java @@ -1,16 +1,19 @@ package org.influxdb.impl; -import com.squareup.moshi.JsonAdapter; -import com.squareup.moshi.JsonReader; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import java.io.EOFException; import java.io.IOException; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; -import okhttp3.OkHttpClient; -import okhttp3.ResponseBody; -import okio.Buffer; + import org.influxdb.InfluxDB; import org.influxdb.TestUtils; import org.influxdb.dto.Query; @@ -20,12 +23,13 @@ import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; + +import com.squareup.moshi.JsonAdapter; +import com.squareup.moshi.JsonReader; + +import okhttp3.OkHttpClient; +import okhttp3.ResponseBody; +import okio.Buffer; import retrofit2.Call; import retrofit2.Callback; import 
retrofit2.Response; @@ -56,7 +60,7 @@ public void testChunkingException(Exception ex, String message) throws IOExcepti when(responseBody.source()).thenReturn(new Buffer()); doThrow(ex).when(adapter).fromJson(any(JsonReader.class)); - String url = TestUtils.getInfluxURL(); + String url = "http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true); InfluxDB influxDB = new InfluxDBImpl(url, "admin", "admin", new OkHttpClient.Builder(), influxDBService, adapter) { @Override public String version() { diff --git a/src/test/nginx/nginx.conf b/src/test/nginx/nginx.conf index 170a7bde2..63fc4ce9f 100644 --- a/src/test/nginx/nginx.conf +++ b/src/test/nginx/nginx.conf @@ -1,56 +1,27 @@ -server { - listen 8080; - server_name localhost; +user www-data; +worker_processes auto; +pid /run/nginx.pid; - #charset koi8-r; - #access_log /var/log/nginx/host.access.log main; - - location / { - proxy_pass http://influxdb:8086/; - #root /var/www/htmlllll; - #index index.html index.htm; - } - - location /influx-api/ { - proxy_pass http://influxdb:8086/; - proxy_http_version 1.1; - proxy_set_header Host $http_host; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_redirect off; - error_log /tmp/inluxproxy.debug.log debug; - } - - - #error_page 404 /404.html; - - # redirect server error pages to the static page /50x.html - # - error_page 500 502 503 504 /50x.html; - location = /50x.html { - root /usr/share/nginx/html; - } - - # proxy the PHP scripts to Apache listening on 127.0.0.1:80 - # - #location ~ \.php$ { - # proxy_pass http://127.0.0.1; - #} +events { + worker_connections 768; + # multi_accept on; +} - # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000 - # - #location ~ \.php$ { - # root html; - # fastcgi_pass 127.0.0.1:9000; - # fastcgi_index index.php; - # fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name; - # include fastcgi_params; - #} - # deny access to .htaccess files, if Apache's document root - # concurs with 
nginx's one - # - #location ~ /\.ht { - # deny all; - #} +http { + server { + listen 8080; + location / { + proxy_pass http://influxdb:8086/; + } + location /influx-api/ { + proxy_pass http://influxdb:8086/; + } + } +} +stream { + server { + listen 8080 udp; + proxy_pass influxdb:8089; + } } - From 13391662daf3c29a9d6cb45d844712dae80ed927 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 5 Jun 2018 16:12:33 +0700 Subject: [PATCH 202/745] fix review: + code format + test db naming --- .../java/org/influxdb/InfluxDBProxyTest.java | 25 +++++--- src/test/java/org/influxdb/TestUtils.java | 58 +++++++++---------- 2 files changed, 45 insertions(+), 38 deletions(-) diff --git a/src/test/java/org/influxdb/InfluxDBProxyTest.java b/src/test/java/org/influxdb/InfluxDBProxyTest.java index d60c0025f..9baef408d 100644 --- a/src/test/java/org/influxdb/InfluxDBProxyTest.java +++ b/src/test/java/org/influxdb/InfluxDBProxyTest.java @@ -21,13 +21,12 @@ @RunWith(JUnitPlatform.class) public class InfluxDBProxyTest { private InfluxDB influxDB; - private String db = "udp"; + private static final String TEST_DB = "InfluxDBProxyTest_db"; + private static final String UDP_DB = "udp"; @BeforeEach public void setUp() throws InterruptedException, IOException { - this.influxDB = TestUtils.connectToInfluxDB(TestUtils.getProxyApiUrl()); - this.influxDB.createDatabase(db); - influxDB.setDatabase(db); + influxDB = TestUtils.connectToInfluxDB(TestUtils.getProxyApiUrl()); } /** @@ -35,11 +34,14 @@ public void setUp() throws InterruptedException, IOException { */ @AfterEach public void cleanup(){ - this.influxDB.deleteDatabase(db); + influxDB.close(); } @Test public void testWriteSomePointThroughTcpProxy() { + influxDB.createDatabase(TEST_DB); + influxDB.setDatabase(TEST_DB); + for(int i = 0; i < 20; i++) { Point point = Point.measurement("weather") .time(i,TimeUnit.HOURS) @@ -49,14 +51,18 @@ public void testWriteSomePointThroughTcpProxy() { influxDB.write(point); } - QueryResult result = 
influxDB.query(new Query("select * from weather", db)); + QueryResult result = influxDB.query(new Query("select * from weather", TEST_DB)); //check points written already to DB Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); - + + influxDB.deleteDatabase(TEST_DB); } @Test public void testWriteSomePointThroughUdpProxy() throws InterruptedException { + influxDB.createDatabase(UDP_DB); + influxDB.setDatabase(UDP_DB); + int proxyUdpPort = Integer.parseInt(TestUtils.getProxyUdpPort()); for(int i = 0; i < 20; i++) { Point point = Point.measurement("weather") @@ -68,10 +74,11 @@ public void testWriteSomePointThroughUdpProxy() throws InterruptedException { } Thread.sleep(2000); - QueryResult result = influxDB.query(new Query("select * from weather", db)); + QueryResult result = influxDB.query(new Query("select * from weather", UDP_DB)); //check points written already to DB Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); - + + influxDB.deleteDatabase(UDP_DB); } } diff --git a/src/test/java/org/influxdb/TestUtils.java b/src/test/java/org/influxdb/TestUtils.java index 9d46ec09b..490087fd9 100644 --- a/src/test/java/org/influxdb/TestUtils.java +++ b/src/test/java/org/influxdb/TestUtils.java @@ -8,48 +8,48 @@ public class TestUtils { - private static String getEnv(String name, String defaultValue) { - Map getenv = System.getenv(); + private static String getEnv(String name, String defaultValue) { + Map getenv = System.getenv(); - if (getenv.containsKey(name)) { + if (getenv.containsKey(name)) { return getenv.get(name); } else { return defaultValue; } - } + } public static String getInfluxIP() { - return getEnv("INFLUXDB_IP", "127.0.0.1"); - } - - public static String getRandomMeasurement() { - return "measurement_" + System.nanoTime(); - } - - public static String getInfluxPORT(boolean apiPort) { - if(apiPort) { - return getEnv("INFLUXDB_PORT_API", "8086"); - } - else { - return 
getEnv("INFLUXDB_PORT_COLLECTD", "8096"); - } - } + return getEnv("INFLUXDB_IP", "127.0.0.1"); + } + + public static String getRandomMeasurement() { + return "measurement_" + System.nanoTime(); + } + + public static String getInfluxPORT(boolean apiPort) { + if(apiPort) { + return getEnv("INFLUXDB_PORT_API", "8086"); + } + else { + return getEnv("INFLUXDB_PORT_COLLECTD", "8096"); + } + } - public static String getProxyApiUrl() { + public static String getProxyApiUrl() { return getEnv("PROXY_API_URL", "http://127.0.0.1:8086/"); - } + } - public static String getProxyUdpPort() { + public static String getProxyUdpPort() { return getEnv("PROXY_UDP_PORT", "8089"); } - public static String defaultRetentionPolicy(String version) { - if (version.startsWith("0.") ) { - return "default"; - } else { - return "autogen"; - } - } + public static String defaultRetentionPolicy(String version) { + if (version.startsWith("0.") ) { + return "default"; + } else { + return "autogen"; + } + } public static InfluxDB connectToInfluxDB() throws InterruptedException, IOException { return connectToInfluxDB(null, null); From f9ca92fea8740d8b9726f62eb8bb768521aea55e Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Wed, 6 Jun 2018 10:06:57 +0700 Subject: [PATCH 203/745] add changelog entry for PR #400 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0abb4668..6fa90d17d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ - Allow write precision of TimeUnit other than Nanoseconds [PR #321](https://github.com/influxdata/influxdb-java/pull/321) - Support dynamic measurement name in InfluxDBResultMapper [PR #423](https://github.com/influxdata/influxdb-java/pull/423) - Debug mode which allows HTTP requests being sent to the database to be logged [PR #450](https://github.com/influxdata/influxdb-java/pull/450) +- Fix problem of connecting to the influx api with URL which does not points to the url root (e.g. 
localhots:80/influx-api/) [PR #400] (https://github.com/influxdata/influxdb-java/pull/400) ## 2.10 [2018-04-26] From 0584ecf230cf2c84559450cbfab7502d520bb619 Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Mon, 2 Jul 2018 17:17:56 +0200 Subject: [PATCH 204/745] [maven-release-plugin] prepare release influxdb-java-2.11 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 13da8cd69..40918a69c 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.11-SNAPSHOT + 2.11 influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -28,7 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git git@github.com:influxdata/influxdb-java.git - HEAD + influxdb-java-2.11 From 1b12a6af4bdd9d362ba4f3639958ad3b637c04b4 Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Mon, 2 Jul 2018 17:18:03 +0200 Subject: [PATCH 205/745] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 40918a69c..70c2fca18 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.11 + 2.12-SNAPSHOT influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -28,7 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git git@github.com:influxdata/influxdb-java.git - influxdb-java-2.11 + HEAD From 4463df87a25b24a924a1f70aae6557087e1554d9 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 3 Jul 2018 11:30:23 +0700 Subject: [PATCH 206/745] add release date of 2.11 --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6fa90d17d..b76eea394 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## 2.11 [unreleased] +## 2.11 [2018-07-02] ### 
Features From 03add9f5e0d86afea7c9adca7cbf87384e56cb7d Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Fri, 13 Jul 2018 15:48:51 +0200 Subject: [PATCH 207/745] Influxdb version 1.6 is available, set is a additional tested configuration and as default --- .travis.yml | 3 ++- compile-and-test.sh | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 93b2893eb..9b2b74fde 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,12 +6,13 @@ services: # We test against all influxdb versions with the most actual JDK. # Test only the most recent influxdb version with JDK8 which will be legacy soon. env: + - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.6 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.5 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.4 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.3 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.2 - MAVEN_JAVA_VERSION=3-jdk-10-slim INFLUXDB_VERSION=1.1 - - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.5 + - MAVEN_JAVA_VERSION=3-jdk-8-slim INFLUXDB_VERSION=1.6 script: ./compile-and-test.sh diff --git a/compile-and-test.sh b/compile-and-test.sh index 2008aa2c8..b589acfb2 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -4,7 +4,7 @@ # set -e -DEFAULT_INFLUXDB_VERSION="1.5" +DEFAULT_INFLUXDB_VERSION="1.6" DEFAULT_MAVEN_JAVA_VERSION="3-jdk-10-slim" INFLUXDB_VERSION="${INFLUXDB_VERSION:-$DEFAULT_INFLUXDB_VERSION}" From 078144e9c35f7353c5097f35fea7c2f07879be23 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Fri, 13 Jul 2018 16:27:36 +0200 Subject: [PATCH 208/745] Use much small nginx:alpine image, formatting and typos --- compile-and-test.sh | 48 +++++++++++++++++++-------------------- src/test/nginx/nginx.conf | 31 ++++++++++++------------- 2 files changed, 38 insertions(+), 41 deletions(-) diff --git a/compile-and-test.sh b/compile-and-test.sh index b589acfb2..b86e3224e 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -2,7 
+2,7 @@ # # script to start influxdb and compile influxdb-java with all tests. # -set -e +set -ex DEFAULT_INFLUXDB_VERSION="1.6" DEFAULT_MAVEN_JAVA_VERSION="3-jdk-10-slim" @@ -10,23 +10,22 @@ DEFAULT_MAVEN_JAVA_VERSION="3-jdk-10-slim" INFLUXDB_VERSION="${INFLUXDB_VERSION:-$DEFAULT_INFLUXDB_VERSION}" MAVEN_JAVA_VERSION="${MAVEN_JAVA_VERSION:-$DEFAULT_MAVEN_JAVA_VERSION}" -echo "Run tests with maven:${MAVEN_JAVA_VERSION} on onfluxdb-${INFLUXDB_VERSION}" +echo "Run tests with maven:${MAVEN_JAVA_VERSION} on influxdb-${INFLUXDB_VERSION}" docker kill influxdb || true docker rm influxdb || true -docker pull influxdb:${version}-alpine || true +docker pull influxdb:${INFLUXDB_VERSION}-alpine || true docker run \ - --detach \ - --name influxdb \ - --publish 8086:8086 \ - --publish 8089:8089/udp \ - --volume ${PWD}/influxdb.conf:/etc/influxdb/influxdb.conf \ - influxdb:${INFLUXDB_VERSION}-alpine - -echo Starting Nginx -docker kill nginx || true + --detach \ + --name influxdb \ + --publish 8086:8086 \ + --publish 8089:8089/udp \ + --volume ${PWD}/influxdb.conf:/etc/influxdb/influxdb.conf \ + influxdb:${INFLUXDB_VERSION}-alpine + +echo "Starting Nginx" +docker kill nginx || true docker rm nginx || true -echo ----- STARTING NGINX CONTAINER ----- docker run \ --detach \ --name nginx \ @@ -34,21 +33,22 @@ docker run \ --publish 8080:8080/udp \ --volume ${PWD}/src/test/nginx/nginx.conf:/etc/nginx/nginx.conf:ro \ --link influxdb:influxdb \ - nginx:stable nginx '-g' 'daemon off;' + nginx:stable-alpine nginx '-g' 'daemon off;' +echo "Running tests" PROXY_API_URL=http://nginx:8080/influx-api/ PROXY_UDP_PORT=8080 - + docker run -it --rm \ - --volume $PWD:/usr/src/mymaven \ - --volume $PWD/.m2:/root/.m2 \ - --workdir /usr/src/mymaven \ - --link=influxdb \ - --link=nginx \ - --env INFLUXDB_IP=influxdb \ - --env PROXY_API_URL=$PROXY_API_URL \ - --env PROXY_UDP_PORT=$PROXY_UDP_PORT \ - maven:${MAVEN_JAVA_VERSION} mvn clean install + --volume ${PWD}:/usr/src/mymaven \ + --volume 
${PWD}/.m2:/root/.m2 \ + --workdir /usr/src/mymaven \ + --link=influxdb \ + --link=nginx \ + --env INFLUXDB_IP=influxdb \ + --env PROXY_API_URL=${PROXY_API_URL} \ + --env PROXY_UDP_PORT=${PROXY_UDP_PORT} \ + maven:${MAVEN_JAVA_VERSION} mvn clean install docker kill influxdb || true docker kill nginx || true diff --git a/src/test/nginx/nginx.conf b/src/test/nginx/nginx.conf index 63fc4ce9f..06e03d492 100644 --- a/src/test/nginx/nginx.conf +++ b/src/test/nginx/nginx.conf @@ -1,27 +1,24 @@ -user www-data; worker_processes auto; pid /run/nginx.pid; events { - worker_connections 768; - # multi_accept on; + worker_connections 768; } - http { - server { - listen 8080; - location / { - proxy_pass http://influxdb:8086/; - } - location /influx-api/ { - proxy_pass http://influxdb:8086/; - } - } + server { + listen 8080; + location / { + proxy_pass http://influxdb:8086/; + } + location /influx-api/ { + proxy_pass http://influxdb:8086/; + } + } } stream { - server { - listen 8080 udp; - proxy_pass influxdb:8089; - } + server { + listen 8080 udp; + proxy_pass influxdb:8089; + } } From da68f5c6ecacf1350cbe69533df863565b93824d Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Fri, 13 Jul 2018 16:36:29 +0200 Subject: [PATCH 209/745] No bash debug --- compile-and-test.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compile-and-test.sh b/compile-and-test.sh index b86e3224e..a046ec81c 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -2,7 +2,7 @@ # # script to start influxdb and compile influxdb-java with all tests. 
# -set -ex +set -e DEFAULT_INFLUXDB_VERSION="1.6" DEFAULT_MAVEN_JAVA_VERSION="3-jdk-10-slim" @@ -39,7 +39,7 @@ echo "Running tests" PROXY_API_URL=http://nginx:8080/influx-api/ PROXY_UDP_PORT=8080 -docker run -it --rm \ +docker run -it --rm \ --volume ${PWD}:/usr/src/mymaven \ --volume ${PWD}/.m2:/root/.m2 \ --workdir /usr/src/mymaven \ From f8bd92152307c888ed57b91961658b161f6ca2d3 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Fri, 13 Jul 2018 16:36:48 +0200 Subject: [PATCH 210/745] Update okhttp from 3.10.0 -> 3.11.0 --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 70c2fca18..948e20d7d 100644 --- a/pom.xml +++ b/pom.xml @@ -242,7 +242,7 @@ org.mockito mockito-core - 2.18.3 + 2.19.0 test @@ -260,12 +260,12 @@ com.squareup.okhttp3 okhttp - 3.10.0 + 3.11.0 com.squareup.okhttp3 logging-interceptor - 3.10.0 + 3.11.0 From 6804e13d74f06c9493d405f2ffd5f890ad4a4f2e Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 21 Jul 2018 11:56:20 +0200 Subject: [PATCH 211/745] Cache version per influxdb instance and reduce ping() calls for every query call, closes #470 --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index d81ca517c..8af436587 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -71,6 +71,7 @@ public class InfluxDBImpl implements InfluxDB { private final InetAddress hostAddress; private final String username; private final String password; + private String version = ""; private final Retrofit retrofit; private final InfluxDBService influxDBService; private BatchProcessor batchProcessor; @@ -313,7 +314,10 @@ public Pong ping() { @Override public String version() { - return ping().getVersion(); + if (version == "") { + this.version = ping().getVersion(); + } + 
return this.version; } @Override From 96580fe5efd80ce8b055bf3f035e19dd35b79940 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Sat, 21 Jul 2018 12:41:02 +0200 Subject: [PATCH 212/745] check for null instead of empty string --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 8af436587..9398b09e7 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -71,7 +71,7 @@ public class InfluxDBImpl implements InfluxDB { private final InetAddress hostAddress; private final String username; private final String password; - private String version = ""; + private String version; private final Retrofit retrofit; private final InfluxDBService influxDBService; private BatchProcessor batchProcessor; @@ -314,7 +314,7 @@ public Pong ping() { @Override public String version() { - if (version == "") { + if (version == null) { this.version = ping().getVersion(); } return this.version; From b1844b2e4355c5f8b1820b21b9d0da8f4102ccc5 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Mon, 23 Jul 2018 16:45:48 +0200 Subject: [PATCH 213/745] Remove code which checks for unsupported influxdb versions --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 9398b09e7..8a238d499 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -481,11 +481,6 @@ public void onFailure(final Call call, final Throwable throwable) { */ @Override public void query(final Query query, final int chunkSize, final Consumer consumer) { - - if (version().startsWith("0.") || version().startsWith("1.0")) { - throw new 
UnsupportedOperationException("chunking not supported"); - } - Call call = null; if (query instanceof BoundParameterQuery) { BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query; @@ -556,9 +551,6 @@ public QueryResult query(final Query query, final TimeUnit timeUnit) { public void createDatabase(final String name) { Preconditions.checkNonEmptyString(name, "name"); String createDatabaseQueryString = String.format("CREATE DATABASE \"%s\"", name); - if (this.version().startsWith("0.")) { - createDatabaseQueryString = String.format("CREATE DATABASE IF NOT EXISTS \"%s\"", name); - } execute(this.influxDBService.postQuery(this.username, this.password, Query.encode(createDatabaseQueryString))); } From 408d80949f139fe8b4720110f32d41e18061c976 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 20 Jul 2018 13:42:39 +0700 Subject: [PATCH 214/745] Implement Issue #389 : Support for MessagePack --- compile-and-test.sh | 1 + pom.xml | 5 + src/main/java/org/influxdb/InfluxDB.java | 9 + .../java/org/influxdb/InfluxDBFactory.java | 24 +- .../java/org/influxdb/impl/InfluxDBImpl.java | 203 +++++++++---- .../msgpack/MessagePackConverterFactory.java | 26 ++ .../MessagePackResponseBodyConverter.java | 28 ++ .../msgpack/MessagePackTraverser.java | 225 +++++++++++++++ .../msgpack/QueryResultModelPath.java | 47 +++ .../java/org/influxdb/BatchOptionsTest.java | 12 +- src/test/java/org/influxdb/InfluxDBTest.java | 99 ++++--- .../influxdb/MessagePackBatchOptionsTest.java | 271 ++++++++++++++++++ .../org/influxdb/MessagePackInfluxDBTest.java | 186 ++++++++++++ src/test/java/org/influxdb/TestUtils.java | 14 +- 14 files changed, 1037 insertions(+), 113 deletions(-) create mode 100644 src/main/java/org/influxdb/msgpack/MessagePackConverterFactory.java create mode 100644 src/main/java/org/influxdb/msgpack/MessagePackResponseBodyConverter.java create mode 100644 src/main/java/org/influxdb/msgpack/MessagePackTraverser.java create mode 100644 
src/main/java/org/influxdb/msgpack/QueryResultModelPath.java create mode 100644 src/test/java/org/influxdb/MessagePackBatchOptionsTest.java create mode 100644 src/test/java/org/influxdb/MessagePackInfluxDBTest.java diff --git a/compile-and-test.sh b/compile-and-test.sh index a046ec81c..2fa5526fb 100755 --- a/compile-and-test.sh +++ b/compile-and-test.sh @@ -45,6 +45,7 @@ docker run -it --rm \ --workdir /usr/src/mymaven \ --link=influxdb \ --link=nginx \ + --env INFLUXDB_VERSION=${INFLUXDB_VERSION} \ --env INFLUXDB_IP=influxdb \ --env PROXY_API_URL=${PROXY_API_URL} \ --env PROXY_UDP_PORT=${PROXY_UDP_PORT} \ diff --git a/pom.xml b/pom.xml index 948e20d7d..37e53162a 100644 --- a/pom.xml +++ b/pom.xml @@ -255,6 +255,11 @@ converter-moshi 2.4.0 + + org.msgpack + msgpack-core + 0.8.16 + diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index 749ede060..7825dbfa7 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -94,6 +94,15 @@ public String value() { } } + /** + * Format of HTTP Response body from InfluxDB server. + */ + public enum ResponseFormat { + /** application/json format. */ + JSON, + /** application/x-msgpack format. */ + MSGPACK + } /** * Set the loglevel which is used for REST related actions. 
* diff --git a/src/main/java/org/influxdb/InfluxDBFactory.java b/src/main/java/org/influxdb/InfluxDBFactory.java index 309e2e7b7..aee28d73a 100644 --- a/src/main/java/org/influxdb/InfluxDBFactory.java +++ b/src/main/java/org/influxdb/InfluxDBFactory.java @@ -1,5 +1,6 @@ package org.influxdb; +import org.influxdb.InfluxDB.ResponseFormat; import org.influxdb.impl.InfluxDBImpl; import okhttp3.OkHttpClient; @@ -78,9 +79,30 @@ public static InfluxDB connect(final String url, final OkHttpClient.Builder clie */ public static InfluxDB connect(final String url, final String username, final String password, final OkHttpClient.Builder client) { + return connect(url, username, password, client, ResponseFormat.JSON); + } + + /** + * Create a connection to a InfluxDB. + * + * @param url + * the url to connect to. + * @param username + * the username which is used to authorize against the influxDB instance. + * @param password + * the password for the username which is used to authorize against the influxDB + * instance. + * @param client + * the HTTP client to use + * @param responseFormat + * The {@code ResponseFormat} to use for response from InfluxDB server + * @return a InfluxDB adapter suitable to access a InfluxDB. 
+ */ + public static InfluxDB connect(final String url, final String username, final String password, + final OkHttpClient.Builder client, final ResponseFormat responseFormat) { Preconditions.checkNonEmptyString(url, "url"); Preconditions.checkNonEmptyString(username, "username"); Objects.requireNonNull(client, "client"); - return new InfluxDBImpl(url, username, password, client); + return new InfluxDBImpl(url, username, password, client, responseFormat); } } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 8a238d499..3d1e8913d 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -7,6 +7,7 @@ import okhttp3.HttpUrl; import okhttp3.MediaType; import okhttp3.OkHttpClient; +import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.ResponseBody; import okhttp3.logging.HttpLoggingInterceptor; @@ -25,14 +26,19 @@ import org.influxdb.dto.QueryResult; import org.influxdb.impl.BatchProcessor.HttpBatchEntry; import org.influxdb.impl.BatchProcessor.UdpBatchEntry; +import org.influxdb.msgpack.MessagePackConverterFactory; +import org.influxdb.msgpack.MessagePackTraverser; + import retrofit2.Call; import retrofit2.Callback; +import retrofit2.Converter.Factory; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.converter.moshi.MoshiConverterFactory; import java.io.EOFException; import java.io.IOException; +import java.io.InputStream; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; @@ -56,6 +62,8 @@ */ public class InfluxDBImpl implements InfluxDB { + private static final String APPLICATION_MSGPACK = "application/x-msgpack"; + static final okhttp3.MediaType MEDIA_TYPE_STRING = MediaType.parse("text/plain"); private static final String SHOW_DATABASE_COMMAND_ENCODED = Query.encode("SHOW DATABASES"); @@ -83,15 +91,28 @@ public class InfluxDBImpl implements InfluxDB { 
private final HttpLoggingInterceptor loggingInterceptor; private final GzipRequestInterceptor gzipRequestInterceptor; private LogLevel logLevel = LogLevel.NONE; - private JsonAdapter adapter; private String database; private String retentionPolicy = "autogen"; private ConsistencyLevel consistency = ConsistencyLevel.ONE; + private final ChunkProccesor chunkProccesor; - public InfluxDBImpl(final String url, final String username, final String password, - final OkHttpClient.Builder client) { - super(); - Moshi moshi = new Moshi.Builder().build(); + /** + * Constructs a new {@code InfluxDBImpl}. + * + * @param url + * The InfluxDB server API URL + * @param username + * The InfluxDB user name + * @param password + * The InfluxDB user password + * @param client + * The OkHttp Client Builder + * @param responseFormat + * The {@code ResponseFormat} to use for response from InfluxDB + * server + */ + public InfluxDBImpl(final String url, final String username, final String password, final OkHttpClient.Builder client, + final ResponseFormat responseFormat) { this.hostAddress = parseHostAddress(url); this.username = username; this.password = password; @@ -100,38 +121,72 @@ public InfluxDBImpl(final String url, final String username, final String passwo setLogLevel(LOG_LEVEL); this.gzipRequestInterceptor = new GzipRequestInterceptor(); - this.retrofit = new Retrofit.Builder() - .baseUrl(url) - .client(client.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor).build()) - .addConverterFactory(MoshiConverterFactory.create()) + client.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor); + + Factory converterFactory = null; + switch (responseFormat) { + case MSGPACK: + client.addInterceptor(chain -> { + Request request = chain.request().newBuilder().addHeader("Accept", APPLICATION_MSGPACK) + .addHeader("Accept-Encoding", "identity").build(); + return chain.proceed(request); + }); + + converterFactory = 
MessagePackConverterFactory.create(); + chunkProccesor = new MessagePackChunkProccesor(); + break; + case JSON: + default: + converterFactory = MoshiConverterFactory.create(); + + Moshi moshi = new Moshi.Builder().build(); + JsonAdapter adapter = moshi.adapter(QueryResult.class); + chunkProccesor = new JSONChunkProccesor(adapter); + break; + } + + this.retrofit = new Retrofit.Builder().baseUrl(url).client(client.build()).addConverterFactory(converterFactory) .build(); this.influxDBService = this.retrofit.create(InfluxDBService.class); - this.adapter = moshi.adapter(QueryResult.class); - } - - InfluxDBImpl(final String url, final String username, final String password, final OkHttpClient.Builder client, - final InfluxDBService influxDBService, final JsonAdapter adapter) { - super(); - this.hostAddress = parseHostAddress(url); - this.username = username; - this.password = password; - - this.loggingInterceptor = new HttpLoggingInterceptor(); - setLogLevel(LOG_LEVEL); - - this.gzipRequestInterceptor = new GzipRequestInterceptor(); - this.retrofit = new Retrofit.Builder() - .baseUrl(url) - .client(client.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor).build()) - .addConverterFactory(MoshiConverterFactory.create()) - .build(); - this.influxDBService = influxDBService; - this.adapter = adapter; + + if (ResponseFormat.MSGPACK.equals(responseFormat)) { + String[] versionNumbers = version().split("\\."); + final int major = Integer.parseInt(versionNumbers[0]); + final int minor = Integer.parseInt(versionNumbers[1]); + final int fromMinor = 4; + if ((major < 2) && ((major != 1) || (minor < fromMinor))) { + throw new InfluxDBException("MessagePack format is only supported from InfluxDB version 1.4 and later"); + } } + } public InfluxDBImpl(final String url, final String username, final String password, - final OkHttpClient.Builder client, final String database, - final String retentionPolicy, final ConsistencyLevel consistency) { + final 
OkHttpClient.Builder client) { + this(url, username, password, client, ResponseFormat.JSON); + + } + + InfluxDBImpl(final String url, final String username, final String password, final OkHttpClient.Builder client, + final InfluxDBService influxDBService, final JsonAdapter adapter) { + super(); + this.hostAddress = parseHostAddress(url); + this.username = username; + this.password = password; + + this.loggingInterceptor = new HttpLoggingInterceptor(); + setLogLevel(LOG_LEVEL); + + this.gzipRequestInterceptor = new GzipRequestInterceptor(); + this.retrofit = new Retrofit.Builder().baseUrl(url) + .client(client.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor).build()) + .addConverterFactory(MoshiConverterFactory.create()).build(); + this.influxDBService = influxDBService; + + chunkProccesor = new JSONChunkProccesor(adapter); + } + + public InfluxDBImpl(final String url, final String username, final String password, final OkHttpClient.Builder client, + final String database, final String retentionPolicy, final ConsistencyLevel consistency) { this(url, username, password, client); setConsistency(consistency); @@ -492,32 +547,26 @@ public void query(final Query query, final int chunkSize, final Consumer() { - @Override - public void onResponse(final Call call, final Response response) { - try { - if (response.isSuccessful()) { - BufferedSource source = response.body().source(); - while (true) { - QueryResult result = InfluxDBImpl.this.adapter.fromJson(source); - if (result != null) { - consumer.accept(result); - } - } - } - try (ResponseBody errorBody = response.errorBody()) { - throw new InfluxDBException(errorBody.string()); - } - } catch (EOFException e) { - QueryResult queryResult = new QueryResult(); - queryResult.setError("DONE"); - consumer.accept(queryResult); - } catch (IOException e) { - QueryResult queryResult = new QueryResult(); - queryResult.setError(e.toString()); - consumer.accept(queryResult); - } + call.enqueue(new Callback() { 
+ @Override + public void onResponse(final Call call, final Response response) { + try { + if (response.isSuccessful()) { + ResponseBody chunkedBody = response.body(); + chunkProccesor.process(chunkedBody, consumer); + } else { + // REVIEW: must be handled consistently with IOException. + ResponseBody errorBody = response.errorBody(); + if (errorBody != null) { + throw new InfluxDBException(errorBody.string()); } + } + } catch (IOException e) { + QueryResult queryResult = new QueryResult(); + queryResult.setError(e.toString()); + consumer.accept(queryResult); + } + } @Override public void onFailure(final Call call, final Throwable t) { @@ -748,4 +797,44 @@ public void dropRetentionPolicy(final String rpName, final String database) { Query.encode(queryBuilder.toString()))); } + private interface ChunkProccesor { + void process(ResponseBody chunkedBody, Consumer consumer) throws IOException; + } + + private class MessagePackChunkProccesor implements ChunkProccesor { + @Override + public void process(final ResponseBody chunkedBody, final Consumer consumer) throws IOException { + MessagePackTraverser traverser = new MessagePackTraverser(); + try (InputStream is = chunkedBody.byteStream()) { + for (QueryResult result : traverser.traverse(is)) { + consumer.accept(result); + } + } + } + } + + private class JSONChunkProccesor implements ChunkProccesor { + private JsonAdapter adapter; + + public JSONChunkProccesor(final JsonAdapter adapter) { + this.adapter = adapter; + } + + @Override + public void process(final ResponseBody chunkedBody, final Consumer consumer) throws IOException { + try { + BufferedSource source = chunkedBody.source(); + while (true) { + QueryResult result = adapter.fromJson(source); + if (result != null) { + consumer.accept(result); + } + } + } catch (EOFException e) { + QueryResult queryResult = new QueryResult(); + queryResult.setError("DONE"); + consumer.accept(queryResult); + } + } + } } diff --git 
a/src/main/java/org/influxdb/msgpack/MessagePackConverterFactory.java b/src/main/java/org/influxdb/msgpack/MessagePackConverterFactory.java new file mode 100644 index 000000000..baa8135df --- /dev/null +++ b/src/main/java/org/influxdb/msgpack/MessagePackConverterFactory.java @@ -0,0 +1,26 @@ +package org.influxdb.msgpack; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Type; + +import okhttp3.ResponseBody; +import retrofit2.Converter; +import retrofit2.Retrofit; + +/** + * A Retrofit Convertor Factory for MessagePack response. + * + * @author hoan.le [at] bonitoo.io + * + */ +public class MessagePackConverterFactory extends Converter.Factory { + public static MessagePackConverterFactory create() { + return new MessagePackConverterFactory(); + } + + @Override + public Converter responseBodyConverter(final Type type, final Annotation[] annotations, + final Retrofit retrofit) { + return new MessagePackResponseBodyConverter(); + } +} diff --git a/src/main/java/org/influxdb/msgpack/MessagePackResponseBodyConverter.java b/src/main/java/org/influxdb/msgpack/MessagePackResponseBodyConverter.java new file mode 100644 index 000000000..ae6d49e9f --- /dev/null +++ b/src/main/java/org/influxdb/msgpack/MessagePackResponseBodyConverter.java @@ -0,0 +1,28 @@ +package org.influxdb.msgpack; + +import java.io.IOException; +import java.io.InputStream; + +import org.influxdb.dto.QueryResult; +import okhttp3.ResponseBody; +import retrofit2.Converter; + +/** + * Test the InfluxDB API over MessagePack format. 
+ * + * @author hoan.le [at] bonitoo.io + * + */ +public class MessagePackResponseBodyConverter implements Converter { + + @Override + public QueryResult convert(final ResponseBody value) throws IOException { + try (InputStream is = value.byteStream()) { + MessagePackTraverser traverser = new MessagePackTraverser(); + for (QueryResult queryResult : traverser.traverse(is)) { + return queryResult; + } + return null; + } + } +} diff --git a/src/main/java/org/influxdb/msgpack/MessagePackTraverser.java b/src/main/java/org/influxdb/msgpack/MessagePackTraverser.java new file mode 100644 index 000000000..21808e76b --- /dev/null +++ b/src/main/java/org/influxdb/msgpack/MessagePackTraverser.java @@ -0,0 +1,225 @@ +package org.influxdb.msgpack; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.influxdb.InfluxDBException; +import org.influxdb.dto.QueryResult; +import org.influxdb.dto.QueryResult.Result; +import org.influxdb.dto.QueryResult.Series; +import org.msgpack.core.ExtensionTypeHeader; +import org.msgpack.core.MessageFormat; +import org.msgpack.core.MessagePack; +import org.msgpack.core.MessageUnpacker; +import org.msgpack.value.ValueType; + +/** + * Traverse the MessagePack input stream and return Query Result objects. 
+ * + * @author hoan.le [at] bonitoo.io + * + */ +public class MessagePackTraverser { + + private String lastStringNode; + + public Iterable traverse(final InputStream is) throws IOException { + MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(is); + + return () -> { + return new Iterator() { + @Override + public boolean hasNext() { + try { + return unpacker.hasNext(); + } catch (IOException e) { + throw new InfluxDBException(e); + } + } + + @Override + public QueryResult next() { + QueryResult queryResult = new QueryResult(); + QueryResultModelPath queryResultPath = new QueryResultModelPath(); + queryResultPath.add("queryResult", queryResult); + try { + traverse(unpacker, queryResultPath, 1); + } catch (IOException e) { + throw new InfluxDBException(e); + } + return queryResult; + } + }; + }; + + } + + void traverse(final MessageUnpacker unpacker, final QueryResultModelPath queryResultPath, + final int readAmount) throws IOException { + int amount = 0; + + while (unpacker.hasNext() && amount < readAmount) { + MessageFormat format = unpacker.getNextFormat(); + ValueType type = format.getValueType(); + int length; + ExtensionTypeHeader extension; + Object o = null; + byte[] dst; + String addedName = null; + Object addedObject = null; + switch (type) { + case NIL: + unpacker.unpackNil(); + break; + case BOOLEAN: + o = unpacker.unpackBoolean(); + break; + case INTEGER: + switch (format) { + case UINT64: + o = unpacker.unpackBigInteger(); + break; + case INT64: + case UINT32: + o = unpacker.unpackLong(); + break; + default: + o = unpacker.unpackInt(); + break; + } + break; + case FLOAT: + o = unpacker.unpackDouble(); + break; + case STRING: + o = unpacker.unpackString(); + lastStringNode = (String) o; + if ("name".equals(o) && queryResultPath.compareEndingPath("series")) { + queryResultPath.add("name", null); + } else if (queryResultPath.compareEndingPath("name")) { + queryResultPath.removeLast(); + Series series = queryResultPath.getLastObject(); + 
series.setName((String) o); + } else if (queryResultPath.compareEndingPath("tags")) { + queryResultPath.add("tagKey", o); + } else if (queryResultPath.compareEndingPath("tagKey")) { + String tagKey = queryResultPath.getLastObject(); + queryResultPath.removeLast(); + Map tags = queryResultPath.getLastObject(); + tags.put(tagKey, (String) o); + } else if (queryResultPath.compareEndingPath("columns")) { + List columns = queryResultPath.getLastObject(); + columns.add((String) o); + } + break; + case BINARY: + length = unpacker.unpackBinaryHeader(); + dst = new byte[length]; + unpacker.readPayload(dst); + break; + case ARRAY: + length = unpacker.unpackArrayHeader(); + if (length > 0) { + if ("results".equals(lastStringNode)) { + QueryResult queryResult = queryResultPath.getLastObject(); + List results = new ArrayList<>(); + queryResult.setResults(results); + addedName = "results"; + addedObject = results; + } else if ("series".equals(lastStringNode) && queryResultPath.compareEndingPath("result")) { + Result result = queryResultPath.getLastObject(); + List series = new ArrayList<>(); + result.setSeries(series); + addedName = "seriesList"; + addedObject = series; + } else if ("columns".equals(lastStringNode) && queryResultPath.compareEndingPath("series")) { + Series series = queryResultPath.getLastObject(); + List columns = new ArrayList<>(); + series.setColumns(columns); + addedName = "columns"; + addedObject = columns; + } else if ("values".equals(lastStringNode) && queryResultPath.compareEndingPath("series")) { + Series series = queryResultPath.getLastObject(); + List> values = new ArrayList<>(); + series.setValues(values); + addedName = "values"; + addedObject = values; + } else if (queryResultPath.compareEndingPath("values")) { + List> values = queryResultPath.getLastObject(); + List value = new ArrayList<>(); + values.add(value); + addedName = "value"; + addedObject = value; + } + + if (addedName != null) { + queryResultPath.add(addedName, addedObject); + } + 
traverse(unpacker, queryResultPath, length); + if (addedName != null) { + queryResultPath.removeLast(); + } + } + break; + case MAP: + length = unpacker.unpackMapHeader(); + if (queryResultPath.compareEndingPath("results")) { + List results = queryResultPath.getLastObject(); + Result result = new Result(); + results.add(result); + addedName = "result"; + addedObject = result; + } else if (queryResultPath.compareEndingPath("seriesList")) { + List series = queryResultPath.getLastObject(); + Series s = new Series(); + series.add(s); + addedName = "series"; + addedObject = s; + } else if ("tags".equals(lastStringNode) && queryResultPath.compareEndingPath("series")) { + Series series = queryResultPath.getLastObject(); + Map tags = new HashMap<>(); + series.setTags(tags); + addedName = "tags"; + addedObject = tags; + } + + if (addedName != null) { + queryResultPath.add(addedName, addedObject); + } + for (int i = 0; i < length; i++) { + traverse(unpacker, queryResultPath, 1); // key + traverse(unpacker, queryResultPath, 1); // value + } + if (addedName != null) { + queryResultPath.removeLast(); + } + break; + case EXTENSION: + final byte msgPackTimeExtType = (byte) 5; + final int timeOffset = 0; + final int timeByteArrayLength = 8; + extension = unpacker.unpackExtensionTypeHeader(); + if (extension.getType() == msgPackTimeExtType) { + dst = new byte[extension.getLength()]; + unpacker.readPayload(dst); + o = ByteBuffer.wrap(dst, timeOffset, timeByteArrayLength).getLong(); + } + break; + + default: + } + + if (queryResultPath.compareEndingPath("value")) { + List value = queryResultPath.getLastObject(); + value.add(o); + } + amount++; + } + } +} diff --git a/src/main/java/org/influxdb/msgpack/QueryResultModelPath.java b/src/main/java/org/influxdb/msgpack/QueryResultModelPath.java new file mode 100644 index 000000000..87c19d2cd --- /dev/null +++ b/src/main/java/org/influxdb/msgpack/QueryResultModelPath.java @@ -0,0 +1,47 @@ +package org.influxdb.msgpack; + +import 
java.util.ArrayList; +import java.util.List; + +/** + * A simple object model path, used internally for navigating on QueryResult objects + * when traverse and parse the MessagePack data. + * + * @author hoan.le [at] bonitoo.io + * + */ +class QueryResultModelPath { + private List names = new ArrayList<>(); + private List objects = new ArrayList<>(); + private int lastIndex = -1; + + public void add(final String name, final Object object) { + names.add(name); + objects.add(object); + lastIndex++; + } + + public T getLastObject() { + return (T) objects.get(lastIndex); + } + + public void removeLast() { + names.remove(lastIndex); + objects.remove(lastIndex); + lastIndex--; + } + + public boolean compareEndingPath(final String... names) { + int diff = (lastIndex + 1) - names.length; + if (diff < 0) { + return false; + } + for (int i = 0; i < names.length; i++) { + if (!names[i].equals(this.names.get(i + diff))) { + return false; + } + } + + return true; + } +} diff --git a/src/test/java/org/influxdb/BatchOptionsTest.java b/src/test/java/org/influxdb/BatchOptionsTest.java index b7d1830a3..9ae453756 100644 --- a/src/test/java/org/influxdb/BatchOptionsTest.java +++ b/src/test/java/org/influxdb/BatchOptionsTest.java @@ -27,7 +27,7 @@ @RunWith(JUnitPlatform.class) public class BatchOptionsTest { - private InfluxDB influxDB; + InfluxDB influxDB; @BeforeEach public void setUp() throws InterruptedException, IOException { @@ -485,7 +485,7 @@ protected void check(InvocationOnMock invocation) { } } - private void writeSomePoints(InfluxDB influxDB, String measurement, int firstIndex, int lastIndex) { + void writeSomePoints(InfluxDB influxDB, String measurement, int firstIndex, int lastIndex) { for (int i = firstIndex; i <= lastIndex; i++) { Point point = Point.measurement(measurement) .time(i,TimeUnit.HOURS) @@ -496,7 +496,7 @@ private void writeSomePoints(InfluxDB influxDB, String measurement, int firstInd } } - private void writeSomePoints(InfluxDB influxDB, int firstIndex, int 
lastIndex) { + void writeSomePoints(InfluxDB influxDB, int firstIndex, int lastIndex) { for (int i = firstIndex; i <= lastIndex; i++) { Point point = Point.measurement("weather") .time(i,TimeUnit.HOURS) @@ -507,15 +507,15 @@ private void writeSomePoints(InfluxDB influxDB, int firstIndex, int lastIndex) { } } - private void write20Points(InfluxDB influxDB) { + void write20Points(InfluxDB influxDB) { writeSomePoints(influxDB, 0, 19); } - private void writeSomePoints(InfluxDB influxDB, int n) { + void writeSomePoints(InfluxDB influxDB, int n) { writeSomePoints(influxDB, 0, n - 1); } - private static String createErrorBody(String errorMessage) { + static String createErrorBody(String errorMessage) { return MessageFormat.format("'{' \"error\": \"{0}\" '}'", errorMessage); } } diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index c80ea0cd1..468c9bed8 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -1,6 +1,7 @@ package org.influxdb; import org.influxdb.InfluxDB.LogLevel; +import org.influxdb.InfluxDB.ResponseFormat; import org.influxdb.dto.BatchPoints; import org.influxdb.dto.BoundParameterQuery.QueryBuilder; import org.influxdb.dto.Point; @@ -13,6 +14,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; @@ -41,9 +43,9 @@ @RunWith(JUnitPlatform.class) public class InfluxDBTest { - private InfluxDB influxDB; + InfluxDB influxDB; private final static int UDP_PORT = 8089; - private final static String UDP_DATABASE = "udp"; + final static String UDP_DATABASE = "udp"; /** * Create a influxDB connection before all tests start. 
@@ -721,49 +723,44 @@ public void testWriteEnableGzipAndDisableGzip() { */ @Test public void testChunking() throws InterruptedException { - if (this.influxDB.version().startsWith("0.") || this.influxDB.version().startsWith("1.0")) { - // do not test version 0.13 and 1.0 - return; - } - String dbName = "write_unittest_" + System.currentTimeMillis(); - this.influxDB.createDatabase(dbName); - String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); - BatchPoints batchPoints = BatchPoints.database(dbName).retentionPolicy(rp).build(); - Point point1 = Point.measurement("disk").tag("atag", "a").addField("used", 60L).addField("free", 1L).build(); - Point point2 = Point.measurement("disk").tag("atag", "b").addField("used", 70L).addField("free", 2L).build(); - Point point3 = Point.measurement("disk").tag("atag", "c").addField("used", 80L).addField("free", 3L).build(); - batchPoints.point(point1); - batchPoints.point(point2); - batchPoints.point(point3); - this.influxDB.write(batchPoints); - - Thread.sleep(2000); - final BlockingQueue queue = new LinkedBlockingQueue<>(); - Query query = new Query("SELECT * FROM disk", dbName); - this.influxDB.query(query, 2, new Consumer() { - @Override - public void accept(QueryResult result) { - queue.add(result); - }}); - - Thread.sleep(2000); - this.influxDB.deleteDatabase(dbName); - - QueryResult result = queue.poll(20, TimeUnit.SECONDS); - Assertions.assertNotNull(result); - System.out.println(result); - Assertions.assertEquals(2, result.getResults().get(0).getSeries().get(0).getValues().size()); - - result = queue.poll(20, TimeUnit.SECONDS); - Assertions.assertNotNull(result); - System.out.println(result); - Assertions.assertEquals(1, result.getResults().get(0).getSeries().get(0).getValues().size()); - - result = queue.poll(20, TimeUnit.SECONDS); - Assertions.assertNotNull(result); - System.out.println(result); - Assertions.assertEquals("DONE", result.getError()); - } + if (this.influxDB.version().startsWith("0.") || 
this.influxDB.version().startsWith("1.0")) { + // do not test version 0.13 and 1.0 + return; + } + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + BatchPoints batchPoints = BatchPoints.database(dbName).retentionPolicy(rp).build(); + Point point1 = Point.measurement("disk").tag("atag", "a").addField("used", 60L).addField("free", 1L).build(); + Point point2 = Point.measurement("disk").tag("atag", "b").addField("used", 70L).addField("free", 2L).build(); + Point point3 = Point.measurement("disk").tag("atag", "c").addField("used", 80L).addField("free", 3L).build(); + batchPoints.point(point1); + batchPoints.point(point2); + batchPoints.point(point3); + this.influxDB.write(batchPoints); + + Thread.sleep(2000); + final BlockingQueue queue = new LinkedBlockingQueue<>(); + Query query = new Query("SELECT * FROM disk", dbName); + this.influxDB.query(query, 2, new Consumer() { + @Override + public void accept(QueryResult result) { + queue.add(result); + }}); + + Thread.sleep(2000); + this.influxDB.deleteDatabase(dbName); + + QueryResult result = queue.poll(20, TimeUnit.SECONDS); + Assertions.assertNotNull(result); + System.out.println(result); + Assertions.assertEquals(2, result.getResults().get(0).getSeries().get(0).getValues().size()); + + result = queue.poll(20, TimeUnit.SECONDS); + Assertions.assertNotNull(result); + System.out.println(result); + Assertions.assertEquals(1, result.getResults().get(0).getSeries().get(0).getValues().size()); + } /** * Test chunking edge case. 
@@ -882,4 +879,16 @@ public void testIsBatchEnabledWithConsistency() { }, InfluxDB.ConsistencyLevel.ALL); Assertions.assertTrue(this.influxDB.isBatchEnabled()); } + + /** + * Test initialize InfluxDBImpl with MessagePack format for InfluxDB versions before 1.4 will throw exception + */ + @Test + @EnabledIfEnvironmentVariable(named = "INFLUXDB_VERSION", matches = "1\\.3|1\\.2|1\\.1") + public void testMessagePackOnOldDbVersion() { + Assertions.assertThrows(InfluxDBException.class, () -> { + TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK); + }); + } + } diff --git a/src/test/java/org/influxdb/MessagePackBatchOptionsTest.java b/src/test/java/org/influxdb/MessagePackBatchOptionsTest.java new file mode 100644 index 000000000..305b0a375 --- /dev/null +++ b/src/test/java/org/influxdb/MessagePackBatchOptionsTest.java @@ -0,0 +1,271 @@ +package org.influxdb; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import java.io.IOException; +import java.util.List; +import java.util.function.BiConsumer; + +import org.influxdb.InfluxDB.ResponseFormat; +import org.influxdb.InfluxDBException.DatabaseNotFoundException; +import org.influxdb.dto.BatchPoints; +import org.influxdb.dto.Point; +import org.influxdb.dto.Query; +import org.influxdb.dto.QueryResult; +import org.influxdb.dto.QueryResult.Series; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; +import org.mockito.invocation.InvocationOnMock; + +/** + * Test the InfluxDB API over MessagePack format + * + * 
@author hoan.le [at] bonitoo.io + * + */ +@RunWith(JUnitPlatform.class) +@EnabledIfEnvironmentVariable(named = "INFLUXDB_VERSION", matches = "1\\.6|1\\.5|1\\.4") +public class MessagePackBatchOptionsTest extends BatchOptionsTest { + + @Override + @BeforeEach + public void setUp() throws InterruptedException, IOException { + influxDB = TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK); + } + + /** + * Test the implementation of {@link BatchOptions#flushDuration(int)} }. + * + * @throws InterruptedException + */ + @Override + @Test + public void testFlushDuration() throws InterruptedException { + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BatchOptions options = BatchOptions.DEFAULTS.flushDuration(200); + influxDB.createDatabase(dbName); + influxDB.setDatabase(dbName); + influxDB.enableBatch(options); + write20Points(influxDB); + + // check no points writen to DB before the flush duration + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + List series = result.getResults().get(0).getSeries(); + Assertions.assertNull(series); + Assertions.assertNull(result.getResults().get(0).getError()); + + // wait for at least one flush + Thread.sleep(500); + result = influxDB.query(new Query("select * from weather", dbName)); + + // check points written already to DB + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + } finally { + this.influxDB.disableBatch(); + this.influxDB.deleteDatabase(dbName); + } + } + + /** + * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. 
+ * + * @throws InterruptedException + */ + @Override + @Test + public void testJitterDuration() throws InterruptedException { + + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BatchOptions options = BatchOptions.DEFAULTS.flushDuration(100).jitterDuration(500); + influxDB.createDatabase(dbName); + influxDB.setDatabase(dbName); + influxDB.enableBatch(options); + write20Points(influxDB); + + Thread.sleep(100); + + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + List series = result.getResults().get(0).getSeries(); + Assertions.assertNull(series); + Assertions.assertNull(result.getResults().get(0).getError()); + + // wait for at least one flush + Thread.sleep(1000); + result = influxDB.query(new Query("select * from weather", dbName)); + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + } finally { + influxDB.disableBatch(); + influxDB.deleteDatabase(dbName); + } + } + + /** + * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. 
use a + * bufferLimit that less than actions, then OneShotBatchWrite is used + */ + @Override + @Test + public void testBufferLimitLessThanActions() throws InterruptedException { + + TestAnswer answer = new TestAnswer() { + + InfluxDBException influxDBException = InfluxDBException + .buildExceptionForErrorState(createErrorBody(InfluxDBException.CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)); + + @Override + protected void check(InvocationOnMock invocation) { + if ((Boolean) params.get("throwException")) { + throw influxDBException; + } + } + }; + + InfluxDB spy = spy(influxDB); + // the spied influxDB.write(BatchPoints) will always throw InfluxDBException + doAnswer(answer).when(spy).write(any(BatchPoints.class)); + + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + answer.params.put("throwException", true); + BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); + BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(3).actions(4).flushDuration(100) + .exceptionHandler(mockHandler); + + spy.createDatabase(dbName); + spy.setDatabase(dbName); + spy.enableBatch(options); + write20Points(spy); + + Thread.sleep(300); + verify(mockHandler, atLeastOnce()).accept(any(), any()); + + QueryResult result = spy.query(new Query("select * from weather", dbName)); + // assert 0 point written because of InfluxDBException and + // OneShotBatchWriter did not retry + List series = result.getResults().get(0).getSeries(); + Assertions.assertNull(series); + Assertions.assertNull(result.getResults().get(0).getError()); + + answer.params.put("throwException", false); + write20Points(spy); + Thread.sleep(300); + result = spy.query(new Query("select * from weather", dbName)); + // assert all 20 points written to DB due to no exception + Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); + } finally { + spy.disableBatch(); + spy.deleteDatabase(dbName); + } + } + + /** + * Test the implementation of {@link 
BatchOptions#bufferLimit(int)} }. + * use a bufferLimit that greater than actions, then RetryCapableBatchWriter is used + */ + @Override + @Test + public void testBufferLimitGreaterThanActions() throws InterruptedException { + TestAnswer answer = new TestAnswer() { + + int nthCall = 0; + InfluxDBException cacheMaxMemorySizeExceededException = InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)); + @Override + protected void check(InvocationOnMock invocation) { + + switch (nthCall++) { + case 0: + throw InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.DATABASE_NOT_FOUND_ERROR)); + case 1: + throw InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)); + default: + break; + } + } + }; + + InfluxDB spy = spy(influxDB); + doAnswer(answer).when(spy).write(any(BatchPoints.class)); + + String dbName = "write_unittest_" + System.currentTimeMillis(); + try { + BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); + BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(10).actions(8).flushDuration(100).exceptionHandler(mockHandler); + + spy.createDatabase(dbName); + spy.setDatabase(dbName); + spy.enableBatch(options); + writeSomePoints(spy, "measurement1", 0, 5); + + Thread.sleep(300); + verify(mockHandler, atLeastOnce()).accept(any(), any()); + + QueryResult result = spy.query(new Query("select * from measurement1", dbName)); + //assert 0 point written because of non-retry capable DATABASE_NOT_FOUND_ERROR and RetryCapableBatchWriter did not retry + List series = result.getResults().get(0).getSeries(); + Assertions.assertNull(series); + Assertions.assertNull(result.getResults().get(0).getError()); + + writeSomePoints(spy, "measurement2", 0, 5); + + Thread.sleep(300); + + result = spy.query(new Query("select * from measurement2", dbName)); + //assert all 6 point written because of retry capable 
CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR and RetryCapableBatchWriter did retry + Assertions.assertEquals(6, result.getResults().get(0).getSeries().get(0).getValues().size()); + } + finally { + spy.disableBatch(); + spy.deleteDatabase(dbName); + } + } + + + /** + * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. + * @throws InterruptedException + */ + @Override + @Test + public void testHandlerOnRetryImpossible() throws InterruptedException { + + String dbName = "write_unittest_" + System.currentTimeMillis(); + InfluxDB spy = spy(influxDB); + doThrow(DatabaseNotFoundException.class).when(spy).write(any(BatchPoints.class)); + + try { + BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); + BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler).flushDuration(100); + + spy.createDatabase(dbName); + spy.setDatabase(dbName); + spy.enableBatch(options); + + writeSomePoints(spy, 1); + + Thread.sleep(200); + verify(mockHandler, times(1)).accept(any(), any()); + + QueryResult result = influxDB.query(new Query("select * from weather", dbName)); + List series = result.getResults().get(0).getSeries(); + Assertions.assertNull(series); + Assertions.assertNull(result.getResults().get(0).getError()); + } finally { + spy.disableBatch(); + spy.deleteDatabase(dbName); + } + + } +} diff --git a/src/test/java/org/influxdb/MessagePackInfluxDBTest.java b/src/test/java/org/influxdb/MessagePackInfluxDBTest.java new file mode 100644 index 000000000..50662d62c --- /dev/null +++ b/src/test/java/org/influxdb/MessagePackInfluxDBTest.java @@ -0,0 +1,186 @@ +package org.influxdb; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import org.influxdb.InfluxDB.ResponseFormat; +import org.influxdb.dto.BatchPoints; +import org.influxdb.dto.Point; +import org.influxdb.dto.Query; +import org.influxdb.dto.QueryResult; +import org.junit.jupiter.api.Assertions; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; + +/** + * Test the InfluxDB API over MessagePack format + * + * @author hoan.le [at] bonitoo.io + * + */ +@RunWith(JUnitPlatform.class) +@EnabledIfEnvironmentVariable(named = "INFLUXDB_VERSION", matches = "1\\.6|1\\.5|1\\.4") +public class MessagePackInfluxDBTest extends InfluxDBTest { + /** + * Create a influxDB connection before all tests start. + * + * @throws InterruptedException + * @throws IOException + */ + @Override + @BeforeEach + public void setUp() throws InterruptedException, IOException { + influxDB = TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK); + influxDB.createDatabase(UDP_DATABASE); + } + + /** + * Tests writing points using the time precision feature + * @throws Exception + */ + @Override + @Test + public void testWriteBatchWithPrecision() throws Exception { + // GIVEN a database and a measurement + String dbName = "precision_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + + String measurement = TestUtils.getRandomMeasurement(); + + long t1 = 1485273600; + Point p1 = Point + .measurement(measurement) + .addField("foo", 1d) + .tag("device", "one") + .time(t1, TimeUnit.SECONDS).build(); // 2017-01-27T16:00:00 + + long t2 = 1485277200; + Point p2 = Point + .measurement(measurement) + .addField("foo", 2d) + .tag("device", "two") + .time(t2, TimeUnit.SECONDS).build(); // 2017-01-27T17:00:00 + + long t3 = 1485280800; + Point p3 = Point + .measurement(measurement) + .addField("foo", 3d) + .tag("device", "three") + .time(t3, TimeUnit.SECONDS).build(); // 2017-01-27T18:00:00 + + BatchPoints batchPoints = BatchPoints + .database(dbName) + .retentionPolicy(rp) + .precision(TimeUnit.SECONDS) + .points(p1, p2, p3) + .build(); 
+ + // WHEN I write the batch + this.influxDB.write(batchPoints); + + // THEN the measure points have a timestamp with second precision + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); + Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0), t1); + Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0), t2); + Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0), t3); + + this.influxDB.deleteDatabase(dbName); + } + + @Override + @Test + public void testWriteBatchWithoutPrecision() throws Exception { + // GIVEN a database and a measurement + String dbName = "precision_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + + String measurement = TestUtils.getRandomMeasurement(); + + // GIVEN a batch of points that has no specific precision + long t1 = 1485273600000000100L; + Point p1 = Point + .measurement(measurement) + .addField("foo", 1d) + .tag("device", "one") + .time(t1, TimeUnit.NANOSECONDS).build(); // 2017-01-27T16:00:00.000000100Z + Double timeP1 = Double.valueOf(t1); + + long t2 = 1485277200000000200L; + Point p2 = Point + .measurement(measurement) + .addField("foo", 2d) + .tag("device", "two") + .time(t2, TimeUnit.NANOSECONDS).build(); // 2017-01-27T17:00:00.000000200Z + Double timeP2 = Double.valueOf(t2); + + long t3 = 1485280800000000300L; + Point p3 = Point + .measurement(measurement) + .addField("foo", 3d) + .tag("device", "three") + .time(t3, TimeUnit.NANOSECONDS).build(); // 2017-01-27T18:00:00.000000300Z + Double timeP3 = Double.valueOf(t3); + + BatchPoints batchPoints = BatchPoints + .database(dbName) + .retentionPolicy(rp) + .points(p1, p2, p3) + .build(); + + // WHEN I write the batch + this.influxDB.write(batchPoints); + + // THEN the measure points have 
a timestamp with second precision + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName), TimeUnit.NANOSECONDS); + Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().size(), 3); + Double value = Double.valueOf(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0).toString()); + Assertions.assertEquals(value, timeP1); + value = Double.valueOf(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0).toString()); + Assertions.assertEquals(value, timeP2); + value = Double.valueOf(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0).toString()); + Assertions.assertEquals(value, timeP3); + + this.influxDB.deleteDatabase(dbName); + } + + @Override + @Test + public void testWriteRecordsWithPrecision() throws Exception { + // GIVEN a database and a measurement + String dbName = "precision_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + + String measurement = TestUtils.getRandomMeasurement(); + List records = new ArrayList<>(); + records.add(measurement + ",atag=test1 idle=100,usertime=10,system=1 1485273600"); + long timeP1 = 1485273600; + + records.add(measurement + ",atag=test2 idle=200,usertime=20,system=2 1485277200"); + long timeP2 = 1485277200; + + records.add(measurement + ",atag=test3 idle=300,usertime=30,system=3 1485280800"); + long timeP3 = 1485280800; + + // WHEN I write the batch + this.influxDB.write(dbName, rp, InfluxDB.ConsistencyLevel.ONE, TimeUnit.SECONDS, records); + + // THEN the measure points have a timestamp with second precision + QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); + Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().size(), 3); + 
Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0), timeP1); + Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0), timeP2); + Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0), timeP3); + this.influxDB.deleteDatabase(dbName); + } +} diff --git a/src/test/java/org/influxdb/TestUtils.java b/src/test/java/org/influxdb/TestUtils.java index 490087fd9..25112ac44 100644 --- a/src/test/java/org/influxdb/TestUtils.java +++ b/src/test/java/org/influxdb/TestUtils.java @@ -1,6 +1,8 @@ package org.influxdb; import okhttp3.OkHttpClient; + +import org.influxdb.InfluxDB.ResponseFormat; import org.influxdb.dto.Pong; import java.io.IOException; @@ -52,14 +54,18 @@ public static String defaultRetentionPolicy(String version) { } public static InfluxDB connectToInfluxDB() throws InterruptedException, IOException { - return connectToInfluxDB(null, null); + return connectToInfluxDB(null, null, ResponseFormat.JSON); } + public static InfluxDB connectToInfluxDB(ResponseFormat responseFormat) throws InterruptedException, IOException { + return connectToInfluxDB(null, null, responseFormat); + } public static InfluxDB connectToInfluxDB(String apiUrl) throws InterruptedException, IOException { - return connectToInfluxDB(new OkHttpClient.Builder(), apiUrl); + return connectToInfluxDB(new OkHttpClient.Builder(), apiUrl, ResponseFormat.JSON); } - public static InfluxDB connectToInfluxDB(final OkHttpClient.Builder client, String apiUrl) throws InterruptedException, IOException { + public static InfluxDB connectToInfluxDB(final OkHttpClient.Builder client, String apiUrl, + ResponseFormat responseFormat) throws InterruptedException, IOException { OkHttpClient.Builder clientToUse; if (client == null) { clientToUse = new OkHttpClient.Builder(); @@ -72,7 +78,7 @@ public static InfluxDB connectToInfluxDB(final OkHttpClient.Builder client, Stri } else { 
apiUrlToUse = apiUrl; } - InfluxDB influxDB = InfluxDBFactory.connect(apiUrlToUse, "admin", "admin", clientToUse); + InfluxDB influxDB = InfluxDBFactory.connect(apiUrlToUse, "admin", "admin", clientToUse, responseFormat); boolean influxDBstarted = false; do { Pong response; From fc5f8745bd7ad00987f7e916631307f038b28d55 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Sat, 21 Jul 2018 23:04:06 +0700 Subject: [PATCH 215/745] Implement Issue #389 : Support for MessagePack refactor + add javadocs to make MessagePackTraverser more unambiguous --- .../MessagePackResponseBodyConverter.java | 5 +- .../msgpack/MessagePackTraverser.java | 54 ++++++++++++++----- 2 files changed, 42 insertions(+), 17 deletions(-) diff --git a/src/main/java/org/influxdb/msgpack/MessagePackResponseBodyConverter.java b/src/main/java/org/influxdb/msgpack/MessagePackResponseBodyConverter.java index ae6d49e9f..25446d051 100644 --- a/src/main/java/org/influxdb/msgpack/MessagePackResponseBodyConverter.java +++ b/src/main/java/org/influxdb/msgpack/MessagePackResponseBodyConverter.java @@ -19,10 +19,7 @@ public class MessagePackResponseBodyConverter implements Converter traverse(final InputStream is) throws IOException { + /** + * Traverse over the whole message pack stream. + * This method can be used for converting query results in chunk. 
+ * + * @param is + * The MessagePack format input stream + * @return an Iterable over the QueryResult objects + * + */ + public Iterable traverse(final InputStream is) { MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(is); return () -> { @@ -45,23 +54,42 @@ public boolean hasNext() { @Override public QueryResult next() { - QueryResult queryResult = new QueryResult(); - QueryResultModelPath queryResultPath = new QueryResultModelPath(); - queryResultPath.add("queryResult", queryResult); - try { - traverse(unpacker, queryResultPath, 1); - } catch (IOException e) { - throw new InfluxDBException(e); - } - return queryResult; + return parse(unpacker); } }; }; } - void traverse(final MessageUnpacker unpacker, final QueryResultModelPath queryResultPath, - final int readAmount) throws IOException { + /** + * Parse the message pack stream. + * This method can be used for converting query + * result from normal query response where exactly one QueryResult returned + * + * @param is + * The MessagePack format input stream + * @return QueryResult + * + */ + public QueryResult parse(final InputStream is) { + MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(is); + return parse(unpacker); + } + + private QueryResult parse(final MessageUnpacker unpacker) { + QueryResult queryResult = new QueryResult(); + QueryResultModelPath queryResultPath = new QueryResultModelPath(); + queryResultPath.add("queryResult", queryResult); + try { + traverse(unpacker, queryResultPath, 1); + } catch (IOException e) { + throw new InfluxDBException(e); + } + return queryResult; + } + + void traverse(final MessageUnpacker unpacker, final QueryResultModelPath queryResultPath, final int readAmount) + throws IOException { int amount = 0; while (unpacker.hasNext() && amount < readAmount) { From 9d7ab06f833a9492b07cb9ad2e59511d5fe88e55 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Mon, 23 Jul 2018 14:48:02 +0700 Subject: [PATCH 216/745] Implement Issue #389 : Support for MessagePack 
test for MessagePackTraverser --- pom.xml | 3 + .../msgpack/MessagePackTraverserTest.java | 77 ++++++++++++++++++ .../org/influxdb/msgpack/msgpack_1.bin | Bin 0 -> 240 bytes .../org/influxdb/msgpack/msgpack_2.bin | Bin 0 -> 189 bytes .../org/influxdb/msgpack/msgpack_3.bin | Bin 0 -> 33 bytes 5 files changed, 80 insertions(+) create mode 100644 src/test/java/org/influxdb/msgpack/MessagePackTraverserTest.java create mode 100644 src/test/resources/org/influxdb/msgpack/msgpack_1.bin create mode 100644 src/test/resources/org/influxdb/msgpack/msgpack_2.bin create mode 100644 src/test/resources/org/influxdb/msgpack/msgpack_3.bin diff --git a/pom.xml b/pom.xml index 37e53162a..390aa27d4 100644 --- a/pom.xml +++ b/pom.xml @@ -73,6 +73,9 @@ docker-compose.yml + + src/test/resources + diff --git a/src/test/java/org/influxdb/msgpack/MessagePackTraverserTest.java b/src/test/java/org/influxdb/msgpack/MessagePackTraverserTest.java new file mode 100644 index 000000000..67d6b41db --- /dev/null +++ b/src/test/java/org/influxdb/msgpack/MessagePackTraverserTest.java @@ -0,0 +1,77 @@ +package org.influxdb.msgpack; + +import java.util.Iterator; +import java.util.List; + +import org.influxdb.dto.QueryResult; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; +import org.junit.platform.runner.JUnitPlatform; +import org.junit.runner.RunWith; + +import static org.junit.jupiter.api.Assertions.*; + +@RunWith(JUnitPlatform.class) +@EnabledIfEnvironmentVariable(named = "INFLUXDB_VERSION", matches = "1\\.6|1\\.5|1\\.4") +public class MessagePackTraverserTest { + + @Test + public void testTraverseMethod() { + MessagePackTraverser traverser = new MessagePackTraverser(); + + /* a json-like view of msgpack_1.bin + + {"results":[{"statement_id":0,"series":[{"name":"disk","columns":["time","atag","free","used"], + 
"values":[[(5,0x00005b556c-252f-23-6438),"a",1,60],[(5,0x00005b556c-252f-23-6438),"b",2,70]],"partial":true}],"partial":true}]} + {"results":[{"statement_id":0,"series":[{"name":"disk","columns":["time","atag","free","used"],"values":[[(5,0x00005b556c-252f-23-6438),"c",3,80]]}]}]} + + */ + + Iterator iter = traverser.traverse(MessagePackTraverserTest.class.getResourceAsStream("msgpack_1.bin")).iterator(); + assertTrue(iter.hasNext()); + QueryResult result = iter.next(); + List> values = result.getResults().get(0).getSeries().get(0).getValues(); + Assertions.assertEquals(2, values.size()); + assertEquals(1532325083L, values.get(0).get(0)); + assertEquals("b", values.get(1).get(1)); + + assertTrue(iter.hasNext()); + result = iter.next(); + values = result.getResults().get(0).getSeries().get(0).getValues(); + Assertions.assertEquals(1, values.size()); + assertEquals(3, values.get(0).get(2)); + + assertFalse(iter.hasNext()); + } + + @Test + public void testParseMethodOnNonEmptyResult() { + MessagePackTraverser traverser = new MessagePackTraverser(); + /* a json-like view of msgpack_2.bin + + {"results":[{"statement_id":0,"series":[{"name":"measurement_957996674028300","columns":["time","device","foo"], + "values":[[(5,0x000058-797a00000),"one",1.0],[(5,0x000058-79-78100000),"two",2.0],[(5,0x000058-79-6a200000),"three",3.0]]}]}]} + */ + QueryResult queryResult = traverser.parse(MessagePackTraverserTest.class.getResourceAsStream("msgpack_2.bin")); + List> values = queryResult.getResults().get(0).getSeries().get(0).getValues(); + Assertions.assertEquals(3, values.size()); + assertEquals(1485273600L, values.get(0).get(0)); + assertEquals("two", values.get(1).get(1)); + assertEquals(3.0, values.get(2).get(2)); + } + + @Test + public void testParseMethodOnEmptyResult() { + MessagePackTraverser traverser = new MessagePackTraverser(); + /* a json-like view of msgpack_3.bin + + {"results":[{"statement_id":0,"series":[]}]} + + */ + QueryResult queryResult = 
traverser.parse(MessagePackTraverserTest.class.getResourceAsStream("msgpack_3.bin")); + System.out.println(); + assertNull(queryResult.getResults().get(0).getSeries()); + + } +} diff --git a/src/test/resources/org/influxdb/msgpack/msgpack_1.bin b/src/test/resources/org/influxdb/msgpack/msgpack_1.bin new file mode 100644 index 0000000000000000000000000000000000000000..48e8c65b2b406887467b7a65891cd39d947ce9d7 GIT binary patch literal 240 zcmZo%UX)r~np09dv3X5#Nn%NAZfaghd}a#6vf|XD%+%tEElcteb5ob3WEN*HPtMON z&CM&GvZN#vD3n-|n7$;fC^dCSX>n@Gva-aS($wNfQ;zenG5|qzXwGf@yK^iSCNkP! kh$S((EiXtcD#=XDISgks!kyBD-6_qaIb|Y-waLr@0MG1ZW&i*H literal 0 HcmV?d00001 diff --git a/src/test/resources/org/influxdb/msgpack/msgpack_2.bin b/src/test/resources/org/influxdb/msgpack/msgpack_2.bin new file mode 100644 index 0000000000000000000000000000000000000000..8574f827025a5c83d99a5368e9c57831b16e2fef GIT binary patch literal 189 zcmZo%UX)r~np09dv1v_lNn%NAZfaghd}a#6vf|XD%+%tE%}eqUb5nQcrY07b7C~e! zP0cMW&CJYA42&#{4Gfki=jW8><`qv~Qj(dQx-2ENEHgQEaaw-<|D;mXzn8c7VtsNlsG$2`w$jC`wH|?Z5$%2LNRy BJrMu^ literal 0 HcmV?d00001 diff --git a/src/test/resources/org/influxdb/msgpack/msgpack_3.bin b/src/test/resources/org/influxdb/msgpack/msgpack_3.bin new file mode 100644 index 0000000000000000000000000000000000000000..a474f573ee790f4d849b133a15ebe5019749a104 GIT binary patch literal 33 ocmZo%UX)r~np09dv1v_lNn%NAZfaghd}a#6vf|XD%+%ru00QI=yZ`_I literal 0 HcmV?d00001 From b025e47fcab9033920835ffe405b7bc582f042a8 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Mon, 23 Jul 2018 15:17:32 +0700 Subject: [PATCH 217/745] Implement Issue #389 : Support for MessagePack MessagePackBatchOptionsTest simply extends BatchOptionsTest --- .../influxdb/MessagePackBatchOptionsTest.java | 247 +----------------- 1 file changed, 1 insertion(+), 246 deletions(-) diff --git a/src/test/java/org/influxdb/MessagePackBatchOptionsTest.java b/src/test/java/org/influxdb/MessagePackBatchOptionsTest.java index 305b0a375..4cb8c0238 100644 --- 
a/src/test/java/org/influxdb/MessagePackBatchOptionsTest.java +++ b/src/test/java/org/influxdb/MessagePackBatchOptionsTest.java @@ -1,35 +1,15 @@ package org.influxdb; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.atLeastOnce; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; - import java.io.IOException; -import java.util.List; -import java.util.function.BiConsumer; import org.influxdb.InfluxDB.ResponseFormat; -import org.influxdb.InfluxDBException.DatabaseNotFoundException; -import org.influxdb.dto.BatchPoints; -import org.influxdb.dto.Point; -import org.influxdb.dto.Query; -import org.influxdb.dto.QueryResult; -import org.influxdb.dto.QueryResult.Series; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; -import org.mockito.invocation.InvocationOnMock; /** - * Test the InfluxDB API over MessagePack format + * Test the BatchOptions with MessagePack format * * @author hoan.le [at] bonitoo.io * @@ -43,229 +23,4 @@ public class MessagePackBatchOptionsTest extends BatchOptionsTest { public void setUp() throws InterruptedException, IOException { influxDB = TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK); } - - /** - * Test the implementation of {@link BatchOptions#flushDuration(int)} }. 
- * - * @throws InterruptedException - */ - @Override - @Test - public void testFlushDuration() throws InterruptedException { - String dbName = "write_unittest_" + System.currentTimeMillis(); - try { - BatchOptions options = BatchOptions.DEFAULTS.flushDuration(200); - influxDB.createDatabase(dbName); - influxDB.setDatabase(dbName); - influxDB.enableBatch(options); - write20Points(influxDB); - - // check no points writen to DB before the flush duration - QueryResult result = influxDB.query(new Query("select * from weather", dbName)); - List series = result.getResults().get(0).getSeries(); - Assertions.assertNull(series); - Assertions.assertNull(result.getResults().get(0).getError()); - - // wait for at least one flush - Thread.sleep(500); - result = influxDB.query(new Query("select * from weather", dbName)); - - // check points written already to DB - Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); - } finally { - this.influxDB.disableBatch(); - this.influxDB.deleteDatabase(dbName); - } - } - - /** - * Test the implementation of {@link BatchOptions#jitterDuration(int)} }. 
- * - * @throws InterruptedException - */ - @Override - @Test - public void testJitterDuration() throws InterruptedException { - - String dbName = "write_unittest_" + System.currentTimeMillis(); - try { - BatchOptions options = BatchOptions.DEFAULTS.flushDuration(100).jitterDuration(500); - influxDB.createDatabase(dbName); - influxDB.setDatabase(dbName); - influxDB.enableBatch(options); - write20Points(influxDB); - - Thread.sleep(100); - - QueryResult result = influxDB.query(new Query("select * from weather", dbName)); - List series = result.getResults().get(0).getSeries(); - Assertions.assertNull(series); - Assertions.assertNull(result.getResults().get(0).getError()); - - // wait for at least one flush - Thread.sleep(1000); - result = influxDB.query(new Query("select * from weather", dbName)); - Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); - } finally { - influxDB.disableBatch(); - influxDB.deleteDatabase(dbName); - } - } - - /** - * Test the implementation of {@link BatchOptions#bufferLimit(int)} }. 
use a - * bufferLimit that less than actions, then OneShotBatchWrite is used - */ - @Override - @Test - public void testBufferLimitLessThanActions() throws InterruptedException { - - TestAnswer answer = new TestAnswer() { - - InfluxDBException influxDBException = InfluxDBException - .buildExceptionForErrorState(createErrorBody(InfluxDBException.CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)); - - @Override - protected void check(InvocationOnMock invocation) { - if ((Boolean) params.get("throwException")) { - throw influxDBException; - } - } - }; - - InfluxDB spy = spy(influxDB); - // the spied influxDB.write(BatchPoints) will always throw InfluxDBException - doAnswer(answer).when(spy).write(any(BatchPoints.class)); - - String dbName = "write_unittest_" + System.currentTimeMillis(); - try { - answer.params.put("throwException", true); - BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); - BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(3).actions(4).flushDuration(100) - .exceptionHandler(mockHandler); - - spy.createDatabase(dbName); - spy.setDatabase(dbName); - spy.enableBatch(options); - write20Points(spy); - - Thread.sleep(300); - verify(mockHandler, atLeastOnce()).accept(any(), any()); - - QueryResult result = spy.query(new Query("select * from weather", dbName)); - // assert 0 point written because of InfluxDBException and - // OneShotBatchWriter did not retry - List series = result.getResults().get(0).getSeries(); - Assertions.assertNull(series); - Assertions.assertNull(result.getResults().get(0).getError()); - - answer.params.put("throwException", false); - write20Points(spy); - Thread.sleep(300); - result = spy.query(new Query("select * from weather", dbName)); - // assert all 20 points written to DB due to no exception - Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size()); - } finally { - spy.disableBatch(); - spy.deleteDatabase(dbName); - } - } - - /** - * Test the implementation of {@link 
BatchOptions#bufferLimit(int)} }. - * use a bufferLimit that greater than actions, then RetryCapableBatchWriter is used - */ - @Override - @Test - public void testBufferLimitGreaterThanActions() throws InterruptedException { - TestAnswer answer = new TestAnswer() { - - int nthCall = 0; - InfluxDBException cacheMaxMemorySizeExceededException = InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)); - @Override - protected void check(InvocationOnMock invocation) { - - switch (nthCall++) { - case 0: - throw InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.DATABASE_NOT_FOUND_ERROR)); - case 1: - throw InfluxDBException.buildExceptionForErrorState(createErrorBody(InfluxDBException.CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR)); - default: - break; - } - } - }; - - InfluxDB spy = spy(influxDB); - doAnswer(answer).when(spy).write(any(BatchPoints.class)); - - String dbName = "write_unittest_" + System.currentTimeMillis(); - try { - BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); - BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(10).actions(8).flushDuration(100).exceptionHandler(mockHandler); - - spy.createDatabase(dbName); - spy.setDatabase(dbName); - spy.enableBatch(options); - writeSomePoints(spy, "measurement1", 0, 5); - - Thread.sleep(300); - verify(mockHandler, atLeastOnce()).accept(any(), any()); - - QueryResult result = spy.query(new Query("select * from measurement1", dbName)); - //assert 0 point written because of non-retry capable DATABASE_NOT_FOUND_ERROR and RetryCapableBatchWriter did not retry - List series = result.getResults().get(0).getSeries(); - Assertions.assertNull(series); - Assertions.assertNull(result.getResults().get(0).getError()); - - writeSomePoints(spy, "measurement2", 0, 5); - - Thread.sleep(300); - - result = spy.query(new Query("select * from measurement2", dbName)); - //assert all 6 point written because of retry capable 
CACHE_MAX_MEMORY_SIZE_EXCEEDED_ERROR and RetryCapableBatchWriter did retry - Assertions.assertEquals(6, result.getResults().get(0).getSeries().get(0).getValues().size()); - } - finally { - spy.disableBatch(); - spy.deleteDatabase(dbName); - } - } - - - /** - * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)} }. - * @throws InterruptedException - */ - @Override - @Test - public void testHandlerOnRetryImpossible() throws InterruptedException { - - String dbName = "write_unittest_" + System.currentTimeMillis(); - InfluxDB spy = spy(influxDB); - doThrow(DatabaseNotFoundException.class).when(spy).write(any(BatchPoints.class)); - - try { - BiConsumer, Throwable> mockHandler = mock(BiConsumer.class); - BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler).flushDuration(100); - - spy.createDatabase(dbName); - spy.setDatabase(dbName); - spy.enableBatch(options); - - writeSomePoints(spy, 1); - - Thread.sleep(200); - verify(mockHandler, times(1)).accept(any(), any()); - - QueryResult result = influxDB.query(new Query("select * from weather", dbName)); - List series = result.getResults().get(0).getSeries(); - Assertions.assertNull(series); - Assertions.assertNull(result.getResults().get(0).getError()); - } finally { - spy.disableBatch(); - spy.deleteDatabase(dbName); - } - - } } From 0ebf1bca297d010316f8aecdf142b96b965f7d88 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Mon, 23 Jul 2018 16:30:29 +0700 Subject: [PATCH 218/745] Implement Issue #389 : Support for MessagePack checking of version support at querying time --- .../java/org/influxdb/impl/InfluxDBImpl.java | 40 +++++++++++-------- src/test/java/org/influxdb/InfluxDBTest.java | 5 ++- 2 files changed, 27 insertions(+), 18 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 3d1e8913d..366b0e729 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ 
b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -94,6 +94,7 @@ public class InfluxDBImpl implements InfluxDB { private String database; private String retentionPolicy = "autogen"; private ConsistencyLevel consistency = ConsistencyLevel.ONE; + private final boolean messagePack; private final ChunkProccesor chunkProccesor; /** @@ -113,6 +114,7 @@ public class InfluxDBImpl implements InfluxDB { */ public InfluxDBImpl(final String url, final String username, final String password, final OkHttpClient.Builder client, final ResponseFormat responseFormat) { + this.messagePack = ResponseFormat.MSGPACK.equals(responseFormat); this.hostAddress = parseHostAddress(url); this.username = username; this.password = password; @@ -149,15 +151,6 @@ public InfluxDBImpl(final String url, final String username, final String passwo .build(); this.influxDBService = this.retrofit.create(InfluxDBService.class); - if (ResponseFormat.MSGPACK.equals(responseFormat)) { - String[] versionNumbers = version().split("\\."); - final int major = Integer.parseInt(versionNumbers[0]); - final int minor = Integer.parseInt(versionNumbers[1]); - final int fromMinor = 4; - if ((major < 2) && ((major != 1) || (minor < fromMinor))) { - throw new InfluxDBException("MessagePack format is only supported from InfluxDB version 1.4 and later"); - } - } } public InfluxDBImpl(final String url, final String username, final String password, @@ -169,6 +162,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo InfluxDBImpl(final String url, final String username, final String password, final OkHttpClient.Builder client, final InfluxDBService influxDBService, final JsonAdapter adapter) { super(); + this.messagePack = false; this.hostAddress = parseHostAddress(url); this.username = username; this.password = password; @@ -509,7 +503,7 @@ public void write(final int udpPort, final List records) { */ @Override public QueryResult query(final Query query) { - return 
execute(callQuery(query)); + return executeQuery(callQuery(query)); } /** @@ -590,7 +584,7 @@ public QueryResult query(final Query query, final TimeUnit timeUnit) { call = this.influxDBService.query(this.username, this.password, query.getDatabase(), TimeUtil.toTimePrecision(timeUnit), query.getCommandWithUrlEncoded()); } - return execute(call); + return executeQuery(call); } /** @@ -600,7 +594,7 @@ public QueryResult query(final Query query, final TimeUnit timeUnit) { public void createDatabase(final String name) { Preconditions.checkNonEmptyString(name, "name"); String createDatabaseQueryString = String.format("CREATE DATABASE \"%s\"", name); - execute(this.influxDBService.postQuery(this.username, this.password, Query.encode(createDatabaseQueryString))); + executeQuery(this.influxDBService.postQuery(this.username, this.password, Query.encode(createDatabaseQueryString))); } /** @@ -608,7 +602,7 @@ public void createDatabase(final String name) { */ @Override public void deleteDatabase(final String name) { - execute(this.influxDBService.postQuery(this.username, this.password, + executeQuery(this.influxDBService.postQuery(this.username, this.password, Query.encode("DROP DATABASE \"" + name + "\""))); } @@ -617,7 +611,7 @@ public void deleteDatabase(final String name) { */ @Override public List describeDatabases() { - QueryResult result = execute(this.influxDBService.query(this.username, + QueryResult result = executeQuery(this.influxDBService.query(this.username, this.password, SHOW_DATABASE_COMMAND_ENCODED)); // {"results":[{"series":[{"name":"databases","columns":["name"],"values":[["mydb"]]}]}]} // Series [name=databases, columns=[name], values=[[mydb], [unittest_1433605300968]]] @@ -671,6 +665,20 @@ static class ErrorMessage { public String error; } + private QueryResult executeQuery(final Call call) { + if (messagePack) { + String[] versionNumbers = version().split("\\."); + final int major = Integer.parseInt(versionNumbers[0]); + final int minor = 
Integer.parseInt(versionNumbers[1]); + final int fromMinor = 4; + if ((major < 2) && ((major != 1) || (minor < fromMinor))) { + throw new UnsupportedOperationException( + "MessagePack format is only supported from InfluxDB version 1.4 and later"); + } + } + return execute(call); + } + private T execute(final Call call) { try { Response response = call.execute(); @@ -758,7 +766,7 @@ public void createRetentionPolicy(final String rpName, final String database, fi if (isDefault) { queryBuilder.append(" DEFAULT"); } - execute(this.influxDBService.postQuery(this.username, this.password, Query.encode(queryBuilder.toString()))); + executeQuery(this.influxDBService.postQuery(this.username, this.password, Query.encode(queryBuilder.toString()))); } /** @@ -793,7 +801,7 @@ public void dropRetentionPolicy(final String rpName, final String database) { .append("\" ON \"") .append(database) .append("\""); - execute(this.influxDBService.postQuery(this.username, this.password, + executeQuery(this.influxDBService.postQuery(this.username, this.password, Query.encode(queryBuilder.toString()))); } diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index 468c9bed8..df7835b97 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -886,8 +886,9 @@ public void testIsBatchEnabledWithConsistency() { @Test @EnabledIfEnvironmentVariable(named = "INFLUXDB_VERSION", matches = "1\\.3|1\\.2|1\\.1") public void testMessagePackOnOldDbVersion() { - Assertions.assertThrows(InfluxDBException.class, () -> { - TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK); + Assertions.assertThrows(UnsupportedOperationException.class, () -> { + InfluxDB influxDB = TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK); + influxDB.describeDatabases(); }); } From 4eaa28a51f0c1c4e67d903c72a809d93547e2480 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Mon, 23 Jul 2018 22:03:26 +0700 Subject: [PATCH 219/745] 
Implement Issue #389 : Support for MessagePack fix test/resources was specified wrongly --- pom.xml | 3 --- 1 file changed, 3 deletions(-) diff --git a/pom.xml b/pom.xml index 390aa27d4..37e53162a 100644 --- a/pom.xml +++ b/pom.xml @@ -73,9 +73,6 @@ docker-compose.yml - - src/test/resources - From 6b67d7562977483aa6cb37a98fa5803e31818c33 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 24 Jul 2018 14:43:44 +0700 Subject: [PATCH 220/745] implement issue #466 a FAQ list for influxdb-java --- FAQ.md | 75 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ README.md | 5 ++++ 2 files changed, 80 insertions(+) create mode 100644 FAQ.md diff --git a/FAQ.md b/FAQ.md new file mode 100644 index 000000000..7137936f1 --- /dev/null +++ b/FAQ.md @@ -0,0 +1,75 @@ +# Frequently Asked Questions + +## Functionality + +- [Is the batch part of the client thread safe ?](#is-the-batch-part-of-the-client-thread-safe-) +- [If multiple threads are accessing it, are they all adding Points to the same batch ?](#if-multiple-threads-are-accessing-it-are-they-all-adding-points-to-the-same-batch-) +- [And if so, is there a single thread in the background that is emptying batch to the server ?](#and-if-so-is-there-a-single-thread-in-the-background-that-is-emptying-batch-to-the-server-) +- [If there is an error during this background process, is it propagated to the rest of the client ?](#if-there-is-an-error-during-this-background-process-is-it-propagated-to-the-rest-of-the-client-) +- [How the client responds to concurrent write backpressure from server ?](#how-the-client-responds-to-concurrent-write-backpressure-from-server-) + + +## Security + +- [Is default config security setup TLS 1.2 ?](#is-default-config-security-setup-tls-12-) + +## Is the batch part of the client thread safe ? 
+ +Yes, the __BatchProcessor__ uses a __BlockingQueue__ and the __RetryCapableBatchWriter__ is synchronized on its __write__ method + +``` +org.influxdb.impl.RetryCapableBatchWriter.write(Collection) + +``` + +## If multiple threads are accessing it, are they all adding Points to the same batch ? + +If they share the same InfluxDbImpl instance, so the answer is Yes (all writing points are put to the __BlockingQueue__) + +## And if so, is there a single thread in the background that is emptying batch to the server ? + +Yes, there is one worker thread that is scheduled to periodically flush the __BlockingQueue__ + +## If there is an error during this background process, is it propagated to the rest of the client ? + +Yes, on initializing BatchOptions, you can pass an exceptionHandler, this handler is used to handle any batch writing that causes a non-recoverable exception or when a batch is evicted due to a retry buffer capacity +(please refer to __BatchOptions.bufferLimit(int)__ for more details) +(list of non-recoverable error : [Handling-errors-of-InfluxDB-under-high-load](https://github.com/influxdata/influxdb-java/wiki/Handling-errors-of-InfluxDB-under-high-load)) + +## How the client responds to concurrent write backpressure from server ? 
+Concurrent WRITE throttling at server side is controlled by the trio (__max-concurrent-write-limit__, __max-enqueued-write-limit__, __enqueued-write-timeout__)
+for example, you can have these in influxdb.conf
+```
+max-concurrent-write-limit = 2
+max-enqueued-write-limit = 1
+enqueued-write-timeout = 1000
+
+```
+
+(more info at this [PR #9888 HTTP Write Throttle](https://github.com/influxdata/influxdb/pull/9888/files))
+
+If the number of concurrent writes reaches the threshold, then any further write will be immediately returned with
+
+```
+org.influxdb.InfluxDBIOException: java.net.SocketException: Connection reset by peer: socket write error
+ at org.influxdb.impl.InfluxDBImpl.execute(InfluxDBImpl.java:692)
+ at org.influxdb.impl.InfluxDBImpl.write(InfluxDBImpl.java:428)
+
+```
+
+From version 2.9, influxdb-java introduces a new error handling feature, the client will try to back off and rewrite failed writes on some recoverable errors (list of recoverable errors : [Handling-errors-of-InfluxDB-under-high-load](https://github.com/influxdata/influxdb-java/wiki/Handling-errors-of-InfluxDB-under-high-load))
+
+So in case the number of write requests exceeds the Concurrent write setting at server side, influxdb-java can try to make sure no writing points get lost (due to rejection from server)
+
+## Is default config security setup TLS 1.2 ?
+
+(answer needs to be verified)
+
+To construct an InfluxDBImpl you will need to pass an OkHttpClient.Builder instance. 
+At this point you are able to set your custom SSLSocketFactory via method OkHttpClient.Builder.sslSocketFactory(…) + +In case you don’t set it, OkHttp will use the system default (Java platform dependent), I tested in Java 8 (influxdb-java has CI test in Java 8 and 10) and see the default SSLContext support these protocols +SSLv3/TLSv1/TLSv1.1/TLSv1.2 + +So if the server supports TLS1.2, the communication should be encrypted by TLS 1.2 (during the handshake the client will provide the list of accepted security protocols and the server will pick one, so this case the server would pick TLS 1.2) + diff --git a/README.md b/README.md index b04221c9d..de3890308 100644 --- a/README.md +++ b/README.md @@ -352,3 +352,8 @@ This is a to the sonatype oss guide to publishing. I'll update this section once the [jira ticket](https://issues.sonatype.org/browse/OSSRH-9728) is closed and I'm able to upload artifacts to the sonatype repositories. + +### Frequently Asked Questions + +This is a [FAQ](FAQ.md) list for influxdb-java. 
+ From 2d1c9bc226a9266e2200eb6c3d7c27a810b5fb15 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Wed, 25 Jul 2018 13:26:35 +0700 Subject: [PATCH 221/745] issue #468 tags should be sorted by key in line protocol to reduce db server overheads add unit test --- src/test/java/org/influxdb/dto/PointTest.java | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/src/test/java/org/influxdb/dto/PointTest.java b/src/test/java/org/influxdb/dto/PointTest.java index 7d1a72ccc..ce7ffd08a 100644 --- a/src/test/java/org/influxdb/dto/PointTest.java +++ b/src/test/java/org/influxdb/dto/PointTest.java @@ -450,4 +450,31 @@ public void testLineProtocolHourPrecision() throws Exception { String expectedHourTimeStamp = String.valueOf(Math.round(pDate.getTime() / 3600000)); // 1000ms * 60s * 60m assertThat(hourTime).isEqualTo(expectedHourTimeStamp); } + + /* + * Test if representation of tags in line protocol format should be sorted by tag key + */ + @Test + public void testTagKeyIsSortedInLineProtocol() { + Point p = Point + .measurement("cpu") + .time(1000000000L, TimeUnit.MILLISECONDS) + .addField("value", 1) + .tag("region", "us-west") + .tag("host", "serverA") + .tag("env", "prod") + .tag("target", "servers") + .tag("zone", "1c") + .tag("tag5", "value5") + .tag("tag1", "value1") + .tag("tag2", "value2") + .tag("tag3", "value3") + .tag("tag4", "value4") + .build(); + + String lineProtocol = p.lineProtocol(); + String correctOrder = "env=prod,host=serverA,region=us-west,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5,target=servers,zone=1c"; + String tags = lineProtocol.substring(lineProtocol.indexOf(',') + 1, lineProtocol.indexOf(' ')); + assertThat(tags).isEqualTo(correctOrder); + } } From a4a77daf6ee5e2b8e99afcc25130c5aa1bc3f3b6 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Wed, 25 Jul 2018 13:30:50 +0200 Subject: [PATCH 222/745] Update maven build dependencies to most recent version of used maven plugins --- pom.xml | 16 ++++++++-------- 1 file 
changed, 8 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 37e53162a..39032d111 100644 --- a/pom.xml +++ b/pom.xml @@ -88,17 +88,17 @@ org.apache.maven.plugins maven-surefire-plugin - 2.21.0 + 2.22.0 org.apache.maven.plugins maven-site-plugin - 3.7 + 3.7.1 org.apache.maven.plugins maven-clean-plugin - 3.0.0 + 3.1.0 org.apache.maven.plugins @@ -113,12 +113,12 @@ org.apache.maven.plugins maven-jar-plugin - 3.0.2 + 3.1.0 org.apache.maven.plugins maven-resources-plugin - 3.0.2 + 3.1.0 org.apache.maven.plugins @@ -155,7 +155,7 @@ org.apache.maven.plugins maven-javadoc-plugin - 3.0.0 + 3.0.1 attach-javadocs @@ -283,7 +283,7 @@ maven-resources-plugin - 3.0.2 + 3.1.0 copy-resources @@ -310,7 +310,7 @@ com.dkanejs.maven.plugins docker-compose-maven-plugin - 1.0.3 + 2.0.0 up From 363193bcbec8cd9c47949f100b3bc19ac6a697dd Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 26 Jul 2018 08:09:59 +0200 Subject: [PATCH 223/745] Silent all compilation warnings regarding javadoc and source cross references --- pom.xml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pom.xml b/pom.xml index 39032d111..aaac6fde5 100644 --- a/pom.xml +++ b/pom.xml @@ -50,6 +50,11 @@ target/site + + org.apache.maven.plugins + maven-jxr-plugin + 2.5 + @@ -164,6 +169,9 @@ + + -html5 + org.jacoco From 36fe47d3e84aa7f3b5db761aa1ee9247b453ab33 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 26 Jul 2018 08:42:11 +0200 Subject: [PATCH 224/745] checkstyle dont like jxr --- pom.xml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/pom.xml b/pom.xml index aaac6fde5..e976b3782 100644 --- a/pom.xml +++ b/pom.xml @@ -50,11 +50,6 @@ target/site - - org.apache.maven.plugins - maven-jxr-plugin - 2.5 - From 7edecca1fe8d364d35fa74a9b687d2836f169a7d Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 26 Jul 2018 09:08:15 +0200 Subject: [PATCH 225/745] Javadoc 8 only knows about html4 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml 
index e976b3782..4d9d4cb35 100644 --- a/pom.xml +++ b/pom.xml @@ -165,7 +165,7 @@ - -html5 + -html4 From 3e24fa471c95519dc86afdb466fdeee4bd8bd218 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 26 Jul 2018 09:34:09 +0200 Subject: [PATCH 226/745] DO not create proper html5/html4 javadoc because this wont be easily possible with java10 and java8 build --- pom.xml | 3 --- 1 file changed, 3 deletions(-) diff --git a/pom.xml b/pom.xml index 4d9d4cb35..39032d111 100644 --- a/pom.xml +++ b/pom.xml @@ -164,9 +164,6 @@ - - -html4 - org.jacoco From 96ffd8c62d951d83b2266918e82ebaaa592c9370 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Thu, 26 Jul 2018 13:22:21 +0700 Subject: [PATCH 227/745] fix issue #445 : https://github.com/influxdata/influxdb-java/issues/445 --- .../java/org/influxdb/impl/InfluxDBImpl.java | 12 ++-- src/test/java/org/influxdb/InfluxDBTest.java | 63 ++++++++++++++++++- 2 files changed, 69 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 366b0e729..096230c36 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -123,12 +123,13 @@ public InfluxDBImpl(final String url, final String username, final String passwo setLogLevel(LOG_LEVEL); this.gzipRequestInterceptor = new GzipRequestInterceptor(); - client.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor); + OkHttpClient.Builder clonedBuilder = client.build().newBuilder(); + clonedBuilder.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor); Factory converterFactory = null; switch (responseFormat) { case MSGPACK: - client.addInterceptor(chain -> { + clonedBuilder.addInterceptor(chain -> { Request request = chain.request().newBuilder().addHeader("Accept", APPLICATION_MSGPACK) .addHeader("Accept-Encoding", "identity").build(); return chain.proceed(request); @@ -147,8 +148,8 @@ public 
InfluxDBImpl(final String url, final String username, final String passwo break; } - this.retrofit = new Retrofit.Builder().baseUrl(url).client(client.build()).addConverterFactory(converterFactory) - .build(); + this.retrofit = new Retrofit.Builder().baseUrl(url).client( + clonedBuilder.build()).addConverterFactory(converterFactory).build(); this.influxDBService = this.retrofit.create(InfluxDBService.class); } @@ -171,8 +172,9 @@ public InfluxDBImpl(final String url, final String username, final String passwo setLogLevel(LOG_LEVEL); this.gzipRequestInterceptor = new GzipRequestInterceptor(); + OkHttpClient.Builder clonedBuilder = client.build().newBuilder(); this.retrofit = new Retrofit.Builder().baseUrl(url) - .client(client.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor).build()) + .client(clonedBuilder.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor).build()) .addConverterFactory(MoshiConverterFactory.create()).build(); this.influxDBService = influxDBService; diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index df7835b97..d5af2bf2c 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -18,6 +18,8 @@ import org.junit.platform.runner.JUnitPlatform; import org.junit.runner.RunWith; +import okhttp3.OkHttpClient; + import java.io.IOException; import java.time.Instant; import java.time.ZoneId; @@ -27,7 +29,9 @@ import java.util.List; import java.util.Set; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; @@ -891,5 +895,62 @@ public void testMessagePackOnOldDbVersion() { influxDB.describeDatabases(); }); } - + + /** + * test for issue #445 + * make sure 
reusing of OkHttpClient.Builder causes no error + * @throws InterruptedException + */ + @Test + public void testIssue445() throws InterruptedException { + ExecutorService executor = Executors.newFixedThreadPool(100); + + final int maxCallables = 10_000; + List> callableList = new ArrayList<>(maxCallables); + for (int i = 0; i < maxCallables; i++) { + callableList.add(new Callable() { + @Override + public String call() throws Exception { + MyInfluxDBBean myBean = new MyInfluxDBBean(); + return myBean.connectAndDoNothing1(); + } + }); + } + executor.invokeAll(callableList); + executor.shutdown(); + if (!executor.awaitTermination(20, TimeUnit.SECONDS)) { + executor.shutdownNow(); + } + Assertions.assertTrue(MyInfluxDBBean.OK); + //assert that MyInfluxDBBean.OKHTTP_BUILDER stays untouched (no interceptor added) + Assertions.assertTrue(MyInfluxDBBean.OKHTTP_BUILDER.interceptors().isEmpty()); + } + + private static final class MyInfluxDBBean { + + static final OkHttpClient.Builder OKHTTP_BUILDER = new OkHttpClient.Builder(); + static Boolean OK = true; + static final String URL = "http://" + TestUtils.getInfluxIP() + ":" + TestUtils.getInfluxPORT(true); + + InfluxDB influxClient; + + String connectAndDoNothing1() { + synchronized (OK) { + if (!OK) { + return null; + } + } + try { + influxClient = InfluxDBFactory.connect(URL, "admin", "admin", OKHTTP_BUILDER); + influxClient.close(); + } catch (Exception e) { + synchronized (OK) { + if (OK) { + OK = false; + } + } + } + return null; + } + } } From 5d1c18f8f95eab0546c437bbb279a67ae14307f9 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 27 Jul 2018 15:13:32 +0700 Subject: [PATCH 228/745] 2.12 changelog entries --- CHANGELOG.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b76eea394..43a47507c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## 2.12 [unreleased] + +### Fixes + +- Remove code which checks for unsupported influxdb versions 
[PR #474](https://github.com/influxdata/influxdb-java/pull/474) +- Unpredictable errors when OkHttpClient.Builder instance is reused [PR #478](https://github.com/influxdata/influxdb-java/pull/478) + +### Features + +- Support for MessagePack [PR #471](https://github.com/influxdata/influxdb-java/pull/471) +- Cache version per influxdb instance and reduce ping() calls for every query call [PR #472](https://github.com/influxdata/influxdb-java/pull/472) + +### Improvements + +- Test: Unit test to ensure tags should be sorted by key in line protocol (to reduce db server overheads) [PR #476](https://github.com/influxdata/influxdb-java/pull/476) + ## 2.11 [2018-07-02] ### Features From 405781c066f8b7a3e5dae84e74501e6132ddd103 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 31 Jul 2018 10:44:05 +0700 Subject: [PATCH 229/745] changelog entry for PR #475 FAQ list for influxdb-java --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 43a47507c..4576ad8fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ - Support for MessagePack [PR #471](https://github.com/influxdata/influxdb-java/pull/471) - Cache version per influxdb instance and reduce ping() calls for every query call [PR #472](https://github.com/influxdata/influxdb-java/pull/472) +- FAQ list for influxdb-java [PR #475](https://github.com/influxdata/influxdb-java/pull/475) ### Improvements From 9cf07ee6523e18b0aafba885633d1f3ccf2daee8 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 31 Jul 2018 14:43:18 +0700 Subject: [PATCH 230/745] Implement Issue #389 : Support for MessagePack accept-encoding must be controlled by custom Request Interceptor rather than hard-coding --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 096230c36..07916cf98 100644 --- 
a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -130,8 +130,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo switch (responseFormat) { case MSGPACK: clonedBuilder.addInterceptor(chain -> { - Request request = chain.request().newBuilder().addHeader("Accept", APPLICATION_MSGPACK) - .addHeader("Accept-Encoding", "identity").build(); + Request request = chain.request().newBuilder().addHeader("Accept", APPLICATION_MSGPACK).build(); return chain.proceed(request); }); From e1a3ac39a2db2004968127b32c894d94b25d40e4 Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Tue, 31 Jul 2018 16:06:58 +0200 Subject: [PATCH 231/745] [maven-release-plugin] prepare release influxdb-java-2.12 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 39032d111..c77bb1680 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.12-SNAPSHOT + 2.12 influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -28,7 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git git@github.com:influxdata/influxdb-java.git - HEAD + influxdb-java-2.12 From 41b13fd8ec42d317639a8e0362eb072c466c4d76 Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Tue, 31 Jul 2018 16:07:09 +0200 Subject: [PATCH 232/745] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index c77bb1680..dd6cba7f9 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.12 + 2.13-SNAPSHOT influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -28,7 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git git@github.com:influxdata/influxdb-java.git - 
influxdb-java-2.12 + HEAD From becbfb59b61662b8171bc0dfaf2b343066a9798c Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Tue, 31 Jul 2018 16:44:39 +0200 Subject: [PATCH 233/745] provide release date of 2.12 --- CHANGELOG.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4576ad8fb..732c66fde 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,9 @@ # Changelog -## 2.12 [unreleased] +## 2.13 [unreleased] + + +## 2.12 [2018-07-31] ### Fixes From f134e5b60150b32c1797cc0cba26bb19d8b5de81 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 14 Aug 2018 13:28:03 +0700 Subject: [PATCH 234/745] fix issue #485 : MessagePack queries: Exception during parsing InfluxDB version [macOS] --- .../java/org/influxdb/impl/InfluxDBImpl.java | 26 +++++++++--- .../org/influxdb/MessagePackInfluxDBTest.java | 41 +++++++++++++++++++ 2 files changed, 62 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 07916cf98..980184d93 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -54,6 +54,8 @@ import java.util.concurrent.atomic.LongAdder; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * Implementation of a InluxDB API. 
@@ -95,6 +97,7 @@ public class InfluxDBImpl implements InfluxDB { private String retentionPolicy = "autogen"; private ConsistencyLevel consistency = ConsistencyLevel.ONE; private final boolean messagePack; + private Boolean messagePackSupport; private final ChunkProccesor chunkProccesor; /** @@ -666,13 +669,26 @@ static class ErrorMessage { public String error; } + private boolean checkMessagePackSupport() { + Matcher matcher = Pattern.compile("(\\d+\\.*)+").matcher(version()); + if (!matcher.find()) { + return false; + } + String s = matcher.group(); + String[] versionNumbers = s.split("\\."); + final int major = Integer.parseInt(versionNumbers[0]); + final int minor = Integer.parseInt(versionNumbers[1]); + final int fromMinor = 4; + return (major >= 2) || ((major == 1) && (minor >= fromMinor)); + } + private QueryResult executeQuery(final Call call) { if (messagePack) { - String[] versionNumbers = version().split("\\."); - final int major = Integer.parseInt(versionNumbers[0]); - final int minor = Integer.parseInt(versionNumbers[1]); - final int fromMinor = 4; - if ((major < 2) && ((major != 1) || (minor < fromMinor))) { + if (messagePackSupport == null) { + messagePackSupport = checkMessagePackSupport(); + } + + if (!messagePackSupport) { throw new UnsupportedOperationException( "MessagePack format is only supported from InfluxDB version 1.4 and later"); } diff --git a/src/test/java/org/influxdb/MessagePackInfluxDBTest.java b/src/test/java/org/influxdb/MessagePackInfluxDBTest.java index 50662d62c..c8ab8b2c0 100644 --- a/src/test/java/org/influxdb/MessagePackInfluxDBTest.java +++ b/src/test/java/org/influxdb/MessagePackInfluxDBTest.java @@ -1,5 +1,9 @@ package org.influxdb; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; + import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -183,4 +187,41 @@ public void testWriteRecordsWithPrecision() 
throws Exception { Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0), timeP3); this.influxDB.deleteDatabase(dbName); } + + @Test + public void testInfluxDBVersionChecking() throws InterruptedException, IOException { + + InfluxDB spy = spy(TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK)); + + doReturn("1.5.2").when(spy).version(); + spy.databaseExists("abc"); + spy.close(); + + spy = spy(TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK)); + doReturn("v1.6.0").when(spy).version(); + spy.databaseExists("abc"); + spy.close(); + + assertThrows(UnsupportedOperationException.class, () -> { + InfluxDB spy1 = spy(TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK)); + try { + doReturn("1.3.0").when(spy1).version(); + spy1.databaseExists("abc"); + } finally { + spy1.close(); + } + + }); + + assertThrows(UnsupportedOperationException.class, () -> { + InfluxDB spy1 = spy(TestUtils.connectToInfluxDB(ResponseFormat.MSGPACK)); + try { + doReturn("a.b.c").when(spy1).version(); + spy1.databaseExists("abc"); + } finally { + spy1.close(); + } + }); + + } } From 090864f54beb185f63ef268c9c95a7c87749e35f Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 21 Aug 2018 11:01:14 +0700 Subject: [PATCH 235/745] imlement issue #449 : support for Basic Authentication --- CHANGELOG.md | 3 ++ .../influxdb/impl/BasicAuthInterceptor.java | 24 +++++++++ .../java/org/influxdb/impl/InfluxDBImpl.java | 49 ++++++------------- .../org/influxdb/impl/InfluxDBService.java | 25 ++++------ .../influxdb/impl/ChunkingExceptionTest.java | 2 +- 5 files changed, 54 insertions(+), 49 deletions(-) create mode 100644 src/main/java/org/influxdb/impl/BasicAuthInterceptor.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 732c66fde..da1cdc53e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,9 @@ ## 2.13 [unreleased] +### Features + +- Support for Basic Authentication [PR #492](https://github.com/influxdata/influxdb-java/pull/492) ## 2.12 [2018-07-31] 
diff --git a/src/main/java/org/influxdb/impl/BasicAuthInterceptor.java b/src/main/java/org/influxdb/impl/BasicAuthInterceptor.java new file mode 100644 index 000000000..ffa75af61 --- /dev/null +++ b/src/main/java/org/influxdb/impl/BasicAuthInterceptor.java @@ -0,0 +1,24 @@ +package org.influxdb.impl; + +import java.io.IOException; + +import okhttp3.Credentials; +import okhttp3.Interceptor; +import okhttp3.Request; +import okhttp3.Response; + +public class BasicAuthInterceptor implements Interceptor { + + private String credentials; + + public BasicAuthInterceptor(final String user, final String password) { + credentials = Credentials.basic(user, password); + } + + @Override + public Response intercept(final Chain chain) throws IOException { + Request request = chain.request(); + Request authenticatedRequest = request.newBuilder().header("Authorization", credentials).build(); + return chain.proceed(authenticatedRequest); + } +} diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 07916cf98..c6b36ed51 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -77,8 +77,6 @@ public class InfluxDBImpl implements InfluxDB { private static final LogLevel LOG_LEVEL = LogLevel.parseLogLevel(System.getProperty(LOG_LEVEL_PROPERTY)); private final InetAddress hostAddress; - private final String username; - private final String password; private String version; private final Retrofit retrofit; private final InfluxDBService influxDBService; @@ -116,16 +114,14 @@ public InfluxDBImpl(final String url, final String username, final String passwo final ResponseFormat responseFormat) { this.messagePack = ResponseFormat.MSGPACK.equals(responseFormat); this.hostAddress = parseHostAddress(url); - this.username = username; - this.password = password; this.loggingInterceptor = new HttpLoggingInterceptor(); setLogLevel(LOG_LEVEL); this.gzipRequestInterceptor = 
new GzipRequestInterceptor(); OkHttpClient.Builder clonedBuilder = client.build().newBuilder(); - clonedBuilder.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor); - + clonedBuilder.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor). + addInterceptor(new BasicAuthInterceptor(username, password)); Factory converterFactory = null; switch (responseFormat) { case MSGPACK: @@ -164,8 +160,6 @@ public InfluxDBImpl(final String url, final String username, final String passwo super(); this.messagePack = false; this.hostAddress = parseHostAddress(url); - this.username = username; - this.password = password; this.loggingInterceptor = new HttpLoggingInterceptor(); setLogLevel(LOG_LEVEL); @@ -173,7 +167,8 @@ public InfluxDBImpl(final String url, final String username, final String passwo this.gzipRequestInterceptor = new GzipRequestInterceptor(); OkHttpClient.Builder clonedBuilder = client.build().newBuilder(); this.retrofit = new Retrofit.Builder().baseUrl(url) - .client(clonedBuilder.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor).build()) + .client(clonedBuilder.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor). 
+ addInterceptor(new BasicAuthInterceptor(username, password)).build()) .addConverterFactory(MoshiConverterFactory.create()).build(); this.influxDBService = influxDBService; @@ -420,8 +415,6 @@ public void write(final BatchPoints batchPoints) { this.batchedCount.add(batchPoints.getPoints().size()); RequestBody lineProtocol = RequestBody.create(MEDIA_TYPE_STRING, batchPoints.lineProtocol()); execute(this.influxDBService.writePoints( - this.username, - this.password, batchPoints.getDatabase(), batchPoints.getRetentionPolicy(), TimeUtil.toTimePrecision(batchPoints.getPrecision()), @@ -434,8 +427,6 @@ public void write(final BatchPoints batchPoints) { public void write(final String database, final String retentionPolicy, final ConsistencyLevel consistency, final TimeUnit precision, final String records) { execute(this.influxDBService.writePoints( - this.username, - this.password, database, retentionPolicy, TimeUtil.toTimePrecision(precision), @@ -534,12 +525,10 @@ public void query(final Query query, final int chunkSize, final Consumer call = null; if (query instanceof BoundParameterQuery) { BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query; - call = this.influxDBService.query(this.username, this.password, - query.getDatabase(), query.getCommandWithUrlEncoded(), chunkSize, + call = this.influxDBService.query(query.getDatabase(), query.getCommandWithUrlEncoded(), chunkSize, boundParameterQuery.getParameterJsonWithUrlEncoded()); } else { - call = this.influxDBService.query(this.username, this.password, - query.getDatabase(), query.getCommandWithUrlEncoded(), chunkSize); + call = this.influxDBService.query(query.getDatabase(), query.getCommandWithUrlEncoded(), chunkSize); } call.enqueue(new Callback() { @@ -578,11 +567,11 @@ public QueryResult query(final Query query, final TimeUnit timeUnit) { Call call = null; if (query instanceof BoundParameterQuery) { BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query; - call = 
this.influxDBService.query(this.username, this.password, query.getDatabase(), + call = this.influxDBService.query(query.getDatabase(), TimeUtil.toTimePrecision(timeUnit), query.getCommandWithUrlEncoded(), boundParameterQuery.getParameterJsonWithUrlEncoded()); } else { - call = this.influxDBService.query(this.username, this.password, query.getDatabase(), + call = this.influxDBService.query(query.getDatabase(), TimeUtil.toTimePrecision(timeUnit), query.getCommandWithUrlEncoded()); } return executeQuery(call); @@ -595,7 +584,7 @@ public QueryResult query(final Query query, final TimeUnit timeUnit) { public void createDatabase(final String name) { Preconditions.checkNonEmptyString(name, "name"); String createDatabaseQueryString = String.format("CREATE DATABASE \"%s\"", name); - executeQuery(this.influxDBService.postQuery(this.username, this.password, Query.encode(createDatabaseQueryString))); + executeQuery(this.influxDBService.postQuery(Query.encode(createDatabaseQueryString))); } /** @@ -603,8 +592,7 @@ public void createDatabase(final String name) { */ @Override public void deleteDatabase(final String name) { - executeQuery(this.influxDBService.postQuery(this.username, this.password, - Query.encode("DROP DATABASE \"" + name + "\""))); + executeQuery(this.influxDBService.postQuery(Query.encode("DROP DATABASE \"" + name + "\""))); } /** @@ -612,8 +600,7 @@ public void deleteDatabase(final String name) { */ @Override public List describeDatabases() { - QueryResult result = executeQuery(this.influxDBService.query(this.username, - this.password, SHOW_DATABASE_COMMAND_ENCODED)); + QueryResult result = executeQuery(this.influxDBService.query(SHOW_DATABASE_COMMAND_ENCODED)); // {"results":[{"series":[{"name":"databases","columns":["name"],"values":[["mydb"]]}]}]} // Series [name=databases, columns=[name], values=[[mydb], [unittest_1433605300968]]] List> databaseNames = result.getResults().get(0).getSeries().get(0).getValues(); @@ -647,16 +634,13 @@ private Call 
callQuery(final Query query) { Call call; if (query instanceof BoundParameterQuery) { BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query; - call = this.influxDBService.postQuery(this.username, - this.password, query.getDatabase(), query.getCommandWithUrlEncoded(), + call = this.influxDBService.postQuery(query.getDatabase(), query.getCommandWithUrlEncoded(), boundParameterQuery.getParameterJsonWithUrlEncoded()); } else { if (query.requiresPost()) { - call = this.influxDBService.postQuery(this.username, - this.password, query.getDatabase(), query.getCommandWithUrlEncoded()); + call = this.influxDBService.postQuery(query.getDatabase(), query.getCommandWithUrlEncoded()); } else { - call = this.influxDBService.query(this.username, - this.password, query.getDatabase(), query.getCommandWithUrlEncoded()); + call = this.influxDBService.query(query.getDatabase(), query.getCommandWithUrlEncoded()); } } return call; @@ -767,7 +751,7 @@ public void createRetentionPolicy(final String rpName, final String database, fi if (isDefault) { queryBuilder.append(" DEFAULT"); } - executeQuery(this.influxDBService.postQuery(this.username, this.password, Query.encode(queryBuilder.toString()))); + executeQuery(this.influxDBService.postQuery(Query.encode(queryBuilder.toString()))); } /** @@ -802,8 +786,7 @@ public void dropRetentionPolicy(final String rpName, final String database) { .append("\" ON \"") .append(database) .append("\""); - executeQuery(this.influxDBService.postQuery(this.username, this.password, - Query.encode(queryBuilder.toString()))); + executeQuery(this.influxDBService.postQuery(Query.encode(queryBuilder.toString()))); } private interface ChunkProccesor { diff --git a/src/main/java/org/influxdb/impl/InfluxDBService.java b/src/main/java/org/influxdb/impl/InfluxDBService.java index 074e9b004..dfe897257 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBService.java +++ b/src/main/java/org/influxdb/impl/InfluxDBService.java @@ -39,49 +39,44 @@ interface 
InfluxDBService { * Can be one of one, any, all, quorum. Defaults to all. */ @POST("write") - public Call writePoints(@Query(U) String username, - @Query(P) String password, @Query(DB) String database, + public Call writePoints(@Query(DB) String database, @Query(RP) String retentionPolicy, @Query(PRECISION) String precision, @Query(CONSISTENCY) String consistency, @Body RequestBody batchPoints); @GET("query") - public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, + public Call query(@Query(DB) String db, @Query(EPOCH) String epoch, @Query(value = Q, encoded = true) String query); @POST("query") - public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, + public Call query(@Query(DB) String db, @Query(EPOCH) String epoch, @Query(value = Q, encoded = true) String query, @Query(value = PARAMS, encoded = true) String params); @GET("query") - public Call query(@Query(U) String username, @Query(P) String password, @Query(DB) String db, + public Call query(@Query(DB) String db, @Query(value = Q, encoded = true) String query); @POST("query") - public Call postQuery(@Query(U) String username, @Query(P) String password, @Query(DB) String db, + public Call postQuery(@Query(DB) String db, @Query(value = Q, encoded = true) String query); @POST("query") - public Call postQuery(@Query(U) String username, @Query(P) String password, @Query(DB) String db, + public Call postQuery(@Query(DB) String db, @Query(value = Q, encoded = true) String query, @Query(value = PARAMS, encoded = true) String params); @GET("query") - public Call query(@Query(U) String username, @Query(P) String password, - @Query(value = Q, encoded = true) String query); + public Call query(@Query(value = Q, encoded = true) String query); @POST("query") - public Call postQuery(@Query(U) String username, - @Query(P) String password, @Query(value = Q, encoded = true) String query); + public Call postQuery(@Query(value = Q, encoded = true) 
String query); @Streaming @GET("query?chunked=true") - public Call query(@Query(U) String username, - @Query(P) String password, @Query(DB) String db, @Query(value = Q, encoded = true) String query, + public Call query(@Query(DB) String db, @Query(value = Q, encoded = true) String query, @Query(CHUNK_SIZE) int chunkSize); @Streaming @POST("query?chunked=true") - public Call query(@Query(U) String username, - @Query(P) String password, @Query(DB) String db, @Query(value = Q, encoded = true) String query, + public Call query(@Query(DB) String db, @Query(value = Q, encoded = true) String query, @Query(CHUNK_SIZE) int chunkSize, @Query(value = PARAMS, encoded = true) String params); } diff --git a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java index c81189b92..b18c657f4 100644 --- a/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java +++ b/src/test/java/org/influxdb/impl/ChunkingExceptionTest.java @@ -56,7 +56,7 @@ public void testChunkingException(Exception ex, String message) throws IOExcepti Call call = mock(Call.class); ResponseBody responseBody = mock(ResponseBody.class); - when(influxDBService.query(any(String.class), any(String.class), any(String.class), any(String.class), anyInt())).thenReturn(call); + when(influxDBService.query(any(String.class), any(String.class), anyInt())).thenReturn(call); when(responseBody.source()).thenReturn(new Buffer()); doThrow(ex).when(adapter).fromJson(any(JsonReader.class)); From 616cecfae551e3a113c3728c6b2038e9cd91e0e2 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Mon, 6 Aug 2018 13:23:18 +0200 Subject: [PATCH 236/745] Added possibility to reuse client as a core part of Reactive client * The InfluxDBImpl constructor has parameter for customise Retrofit.Builder. That builder is use in influxdb-java-reactive client for create reactive version of InfluxDBService. 
* The InfluxDBResultMapper is able to handle results which a different time precision * The InfluxDBResultMapper is able to map Integer to Instant --- .../java/org/influxdb/InfluxDBException.java | 2 +- .../java/org/influxdb/impl/InfluxDBImpl.java | 33 ++++++- .../influxdb/impl/InfluxDBResultMapper.java | 94 ++++++++++++++++--- .../impl/InfluxDBResultMapperTest.java | 76 ++++++++++++++- .../impl/RetryCapableBatchWriterTest.java | 15 ++- 5 files changed, 197 insertions(+), 23 deletions(-) diff --git a/src/main/java/org/influxdb/InfluxDBException.java b/src/main/java/org/influxdb/InfluxDBException.java index bc09396eb..5e47d5491 100644 --- a/src/main/java/org/influxdb/InfluxDBException.java +++ b/src/main/java/org/influxdb/InfluxDBException.java @@ -126,7 +126,7 @@ public boolean isRetryWorth() { } } - private static InfluxDBException buildExceptionFromErrorMessage(final String errorMessage) { + public static InfluxDBException buildExceptionFromErrorMessage(final String errorMessage) { if (errorMessage.contains(DATABASE_NOT_FOUND_ERROR)) { return new DatabaseNotFoundException(errorMessage); } diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index a2f1019b6..aada0c75b 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -107,14 +107,37 @@ public class InfluxDBImpl implements InfluxDB { * The InfluxDB user name * @param password * The InfluxDB user password - * @param client + * @param okHttpBuilder * The OkHttp Client Builder * @param responseFormat * The {@code ResponseFormat} to use for response from InfluxDB * server */ - public InfluxDBImpl(final String url, final String username, final String password, final OkHttpClient.Builder client, - final ResponseFormat responseFormat) { + public InfluxDBImpl(final String url, final String username, final String password, + final OkHttpClient.Builder okHttpBuilder, final ResponseFormat 
responseFormat) { + this(url, username, password, okHttpBuilder, new Retrofit.Builder(), responseFormat); + } + + /** + * Constructs a new {@code InfluxDBImpl}. + * + * @param url + * The InfluxDB server API URL + * @param username + * The InfluxDB user name + * @param password + * The InfluxDB user password + * @param okHttpBuilder + * The OkHttp Client Builder + * @param retrofitBuilder + * The Retrofit Builder + * @param responseFormat + * The {@code ResponseFormat} to use for response from InfluxDB + * server + */ + public InfluxDBImpl(final String url, final String username, final String password, + final OkHttpClient.Builder okHttpBuilder, final Retrofit.Builder retrofitBuilder, + final ResponseFormat responseFormat) { this.messagePack = ResponseFormat.MSGPACK.equals(responseFormat); this.hostAddress = parseHostAddress(url); @@ -122,7 +145,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo setLogLevel(LOG_LEVEL); this.gzipRequestInterceptor = new GzipRequestInterceptor(); - OkHttpClient.Builder clonedBuilder = client.build().newBuilder(); + OkHttpClient.Builder clonedBuilder = okHttpBuilder.build().newBuilder(); clonedBuilder.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor). 
addInterceptor(new BasicAuthInterceptor(username, password)); Factory converterFactory = null; @@ -146,7 +169,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo break; } - this.retrofit = new Retrofit.Builder().baseUrl(url).client( + this.retrofit = retrofitBuilder.baseUrl(url).client( clonedBuilder.build()).addConverterFactory(converterFactory).build(); this.influxDBService = this.retrofit.create(InfluxDBService.class); diff --git a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java index efe3a991a..1b99073be 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java +++ b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java @@ -27,8 +27,8 @@ import java.time.temporal.ChronoField; import java.util.LinkedList; import java.util.List; -import java.util.Objects; import java.util.Map.Entry; +import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; @@ -84,9 +84,33 @@ public class InfluxDBResultMapper { * possible to define the values of your POJO (e.g. due to an unsupported field type). */ public List toPOJO(final QueryResult queryResult, final Class clazz) throws InfluxDBMapperException { + return toPOJO(queryResult, clazz, TimeUnit.MILLISECONDS); + } + + /** + *

+ * Process a {@link QueryResult} object returned by the InfluxDB client inspecting the internal + * data structure and creating the respective object instances based on the Class passed as + * parameter. + *

+ * + * @param queryResult the InfluxDB result object + * @param clazz the Class that will be used to hold your measurement data + * @param precision the time precision of results + * @param the target type + * + * @return a {@link List} of objects from the same Class passed as parameter and sorted on the + * same order as received from InfluxDB. + * + * @throws InfluxDBMapperException If {@link QueryResult} parameter contain errors, + * clazz parameter is not annotated with @Measurement or it was not + * possible to define the values of your POJO (e.g. due to an unsupported field type). + */ + public List toPOJO(final QueryResult queryResult, final Class clazz, + final TimeUnit precision) throws InfluxDBMapperException { throwExceptionIfMissingAnnotation(clazz); String measurementName = getMeasurementName(clazz); - return this.toPOJO(queryResult, clazz, measurementName); + return this.toPOJO(queryResult, clazz, measurementName, precision); } /** @@ -110,6 +134,32 @@ public List toPOJO(final QueryResult queryResult, final Class clazz) t */ public List toPOJO(final QueryResult queryResult, final Class clazz, final String measurementName) throws InfluxDBMapperException { + return toPOJO(queryResult, clazz, measurementName, TimeUnit.MILLISECONDS); + } + + /** + *

+ * Process a {@link QueryResult} object returned by the InfluxDB client inspecting the internal + * data structure and creating the respective object instances based on the Class passed as + * parameter. + *

+ * + * @param queryResult the InfluxDB result object + * @param clazz the Class that will be used to hold your measurement data + * @param the target type + * @param measurementName name of the Measurement + * @param precision the time precision of results + * + * @return a {@link List} of objects from the same Class passed as parameter and sorted on the + * same order as received from InfluxDB. + * + * @throws InfluxDBMapperException If {@link QueryResult} parameter contain errors, + * clazz parameter is not annotated with @Measurement or it was not + * possible to define the values of your POJO (e.g. due to an unsupported field type). + */ + public List toPOJO(final QueryResult queryResult, final Class clazz, final String measurementName, + final TimeUnit precision) + throws InfluxDBMapperException { Objects.requireNonNull(measurementName, "measurementName"); Objects.requireNonNull(queryResult, "queryResult"); @@ -126,7 +176,7 @@ public List toPOJO(final QueryResult queryResult, final Class clazz, f internalResult.getSeries().stream() .filter(series -> series.getName().equals(measurementName)) .forEachOrdered(series -> { - parseSeriesAs(series, clazz, result); + parseSeriesAs(series, clazz, result, precision); }); }); @@ -152,7 +202,7 @@ void throwExceptionIfResultWithError(final QueryResult queryResult) { }); } - void cacheMeasurementClass(final Class... classVarAgrs) { + public void cacheMeasurementClass(final Class... classVarAgrs) { for (Class clazz : classVarAgrs) { if (CLASS_FIELD_CACHE.containsKey(clazz.getName())) { continue; @@ -172,13 +222,22 @@ void cacheMeasurementClass(final Class... 
classVarAgrs) { } } - String getMeasurementName(final Class clazz) { + public String getMeasurementName(final Class clazz) { return ((Measurement) clazz.getAnnotation(Measurement.class)).name(); } + public ConcurrentMap getColNameAndFieldMap(final Class clazz) { + return CLASS_FIELD_CACHE.get(clazz.getName()); + } + List parseSeriesAs(final QueryResult.Series series, final Class clazz, final List result) { + return parseSeriesAs(series, clazz, result, TimeUnit.MILLISECONDS); + } + + List parseSeriesAs(final QueryResult.Series series, final Class clazz, final List result, + final TimeUnit precision) { int columnSize = series.getColumns().size(); - ConcurrentMap colNameAndFieldMap = CLASS_FIELD_CACHE.get(clazz.getName()); + ConcurrentMap colNameAndFieldMap = getColNameAndFieldMap(clazz); try { T object = null; for (List row : series.getValues()) { @@ -188,7 +247,7 @@ List parseSeriesAs(final QueryResult.Series series, final Class clazz, if (object == null) { object = clazz.newInstance(); } - setFieldValue(object, correspondingField, row.get(i)); + setFieldValue(object, correspondingField, row.get(i), precision); } } // When the "GROUP BY" clause is used, "tags" are returned as Map and @@ -200,7 +259,7 @@ List parseSeriesAs(final QueryResult.Series series, final Class clazz, Field correspondingField = colNameAndFieldMap.get(entry.getKey()/*InfluxDB columnName*/); if (correspondingField != null) { // I don't think it is possible to reach here without a valid "object" - setFieldValue(object, correspondingField, entry.getValue()); + setFieldValue(object, correspondingField, entry.getValue(), precision); } } } @@ -223,10 +282,11 @@ List parseSeriesAs(final QueryResult.Series series, final Class clazz, * @param object * @param field * @param value + * @param precision * @throws IllegalArgumentException * @throws IllegalAccessException */ - void setFieldValue(final T object, final Field field, final Object value) + void setFieldValue(final T object, final Field field, final 
Object value, final TimeUnit precision) throws IllegalArgumentException, IllegalAccessException { if (value == null) { return; @@ -236,7 +296,7 @@ void setFieldValue(final T object, final Field field, final Object value) if (!field.isAccessible()) { field.setAccessible(true); } - if (fieldValueModified(fieldType, field, object, value) + if (fieldValueModified(fieldType, field, object, value, precision) || fieldValueForPrimitivesModified(fieldType, field, object, value) || fieldValueForPrimitiveWrappersModified(fieldType, field, object, value)) { return; @@ -252,7 +312,8 @@ void setFieldValue(final T object, final Field field, final Object value) } } - boolean fieldValueModified(final Class fieldType, final Field field, final T object, final Object value) + boolean fieldValueModified(final Class fieldType, final Field field, final T object, final Object value, + final TimeUnit precision) throws IllegalArgumentException, IllegalAccessException { if (String.class.isAssignableFrom(fieldType)) { field.set(object, String.valueOf(value)); @@ -263,9 +324,11 @@ boolean fieldValueModified(final Class fieldType, final Field field, fina if (value instanceof String) { instant = Instant.from(ISO8601_FORMATTER.parse(String.valueOf(value))); } else if (value instanceof Long) { - instant = Instant.ofEpochMilli((Long) value); + instant = Instant.ofEpochMilli(toMillis((Long) value, precision)); } else if (value instanceof Double) { - instant = Instant.ofEpochMilli(((Double) value).longValue()); + instant = Instant.ofEpochMilli(toMillis(((Double) value).longValue(), precision)); + } else if (value instanceof Integer) { + instant = Instant.ofEpochMilli(toMillis(((Integer) value).longValue(), precision)); } else { throw new InfluxDBMapperException("Unsupported type " + field.getClass() + " for field " + field.getName()); } @@ -316,4 +379,9 @@ boolean fieldValueForPrimitiveWrappersModified(final Class fieldType, fin } return false; } + + private Long toMillis(final Long value, final 
TimeUnit precision) { + + return TimeUnit.MILLISECONDS.convert(value, precision); + } } diff --git a/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java b/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java index 688ab9387..445528fb6 100644 --- a/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java +++ b/src/test/java/org/influxdb/impl/InfluxDBResultMapperTest.java @@ -29,12 +29,12 @@ import java.util.Map; import java.util.Random; import java.util.UUID; +import java.util.concurrent.TimeUnit; import org.influxdb.InfluxDBMapperException; import org.influxdb.annotation.Column; import org.influxdb.annotation.Measurement; import org.influxdb.dto.QueryResult; -import org.influxdb.impl.InfluxDBResultMapper; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.platform.runner.JUnitPlatform; @@ -220,6 +220,26 @@ public void testFieldValueModified_DateAsISO8601() { Assertions.assertTrue(result.size() == 1); } + @Test + public void testFieldValueModified_DateAsInteger() { + // Given... + mapper.cacheMeasurementClass(MyCustomMeasurement.class); + + List columnList = Arrays.asList("time"); + List firstSeriesResult = Arrays.asList(1_000); + + QueryResult.Series series = new QueryResult.Series(); + series.setColumns(columnList); + series.setValues(Arrays.asList(firstSeriesResult)); + + //When... + List result = new LinkedList<>(); + mapper.parseSeriesAs(series, MyCustomMeasurement.class, result); + + //Then... + Assertions.assertTrue(result.size() == 1); + } + @Test public void testUnsupportedField() { // Given... @@ -335,6 +355,60 @@ public void testToPOJO_ticket363() { Assertions.assertEquals(1, result.get(0).time.getNano(), "incorrect value for the nanoseconds field"); } + @Test + void testToPOJO_Precision() { + // Given... 
+ mapper.cacheMeasurementClass(MyCustomMeasurement.class); + + List columnList = Arrays.asList("time"); + List firstSeriesResult = Arrays.asList(1_500_000L); + + QueryResult.Series series = new QueryResult.Series(); + series.setName("CustomMeasurement"); + series.setColumns(columnList); + series.setValues(Arrays.asList(firstSeriesResult)); + + QueryResult.Result internalResult = new QueryResult.Result(); + internalResult.setSeries(Arrays.asList(series)); + + QueryResult queryResult = new QueryResult(); + queryResult.setResults(Arrays.asList(internalResult)); + + // When... + List result = mapper.toPOJO(queryResult, MyCustomMeasurement.class, TimeUnit.SECONDS); + + // Then... + Assertions.assertEquals(1, result.size(), "incorrect number of elements"); + Assertions.assertEquals(1_500_000_000L, result.get(0).time.toEpochMilli(), "incorrect value for the millis field"); + } + + @Test + void testToPOJO_SetMeasureName() { + // Given... + mapper.cacheMeasurementClass(MyCustomMeasurement.class); + + List columnList = Arrays.asList("uuid"); + List firstSeriesResult = Arrays.asList(UUID.randomUUID().toString()); + + QueryResult.Series series = new QueryResult.Series(); + series.setName("MySeriesName"); + series.setColumns(columnList); + series.setValues(Arrays.asList(firstSeriesResult)); + + QueryResult.Result internalResult = new QueryResult.Result(); + internalResult.setSeries(Arrays.asList(series)); + + QueryResult queryResult = new QueryResult(); + queryResult.setResults(Arrays.asList(internalResult)); + + //When... + List result = + mapper.toPOJO(queryResult, MyCustomMeasurement.class, "MySeriesName"); + + //Then... 
+ Assertions.assertTrue(result.size() == 1); + } + @Measurement(name = "CustomMeasurement") static class MyCustomMeasurement { diff --git a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java index 189f4d6d1..9e30586e8 100644 --- a/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java +++ b/src/test/java/org/influxdb/impl/RetryCapableBatchWriterTest.java @@ -107,9 +107,11 @@ public void testAllNonRecoverableExceptions() { InfluxDBException nonRecoverable5 = InfluxDBException.buildExceptionForErrorState(createErrorBody("field type conflict 'abc'")); InfluxDBException nonRecoverable6 = new InfluxDBException.RetryBufferOverrunException(createErrorBody("Retry BufferOverrun Exception")); InfluxDBException nonRecoverable7 = InfluxDBException.buildExceptionForErrorState(createErrorBody("user is not authorized to write to database")); - + InfluxDBException nonRecoverable8 = InfluxDBException.buildExceptionForErrorState(createErrorBody("authorization failed")); + InfluxDBException nonRecoverable9 = InfluxDBException.buildExceptionForErrorState(createErrorBody("username required")); + List exceptions = Arrays.asList(nonRecoverable1, nonRecoverable2, nonRecoverable3, - nonRecoverable4, nonRecoverable5, nonRecoverable6, nonRecoverable7); + nonRecoverable4, nonRecoverable5, nonRecoverable6, nonRecoverable7, nonRecoverable8, nonRecoverable9); int size = exceptions.size(); doAnswer(new TestAnswer() { int i = 0; @@ -224,8 +226,15 @@ protected void check(InvocationOnMock invocation) { Assertions.assertEquals(bp1, captor4Write.getAllValues().get(1)); //bp2 written Assertions.assertEquals(bp2, captor4Write.getAllValues().get(2)); - } + + @Test + void defaultExceptionIsRecoverable() { + InfluxDBException unknownError = InfluxDBException.buildExceptionForErrorState(createErrorBody("unknown error")); + + Assertions.assertTrue(unknownError.isRetryWorth()); + } + private static String 
createErrorBody(String errorMessage) { return MessageFormat.format("'{' \"error\": \"{0}\" '}'", errorMessage); } From 69e635d9282d21400b4e6c807b5ad8af1ae9ac81 Mon Sep 17 00:00:00 2001 From: bednar Date: Fri, 24 Aug 2018 10:58:28 +0200 Subject: [PATCH 237/745] InfluxDBResultMapper: removed useless changes, used primitives to remove unnecessary unboxing --- .../org/influxdb/impl/InfluxDBResultMapper.java | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java index 1b99073be..000f517b7 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java +++ b/src/main/java/org/influxdb/impl/InfluxDBResultMapper.java @@ -202,7 +202,7 @@ void throwExceptionIfResultWithError(final QueryResult queryResult) { }); } - public void cacheMeasurementClass(final Class... classVarAgrs) { + void cacheMeasurementClass(final Class... classVarAgrs) { for (Class clazz : classVarAgrs) { if (CLASS_FIELD_CACHE.containsKey(clazz.getName())) { continue; @@ -222,14 +222,10 @@ public void cacheMeasurementClass(final Class... 
classVarAgrs) { } } - public String getMeasurementName(final Class clazz) { + String getMeasurementName(final Class clazz) { return ((Measurement) clazz.getAnnotation(Measurement.class)).name(); } - public ConcurrentMap getColNameAndFieldMap(final Class clazz) { - return CLASS_FIELD_CACHE.get(clazz.getName()); - } - List parseSeriesAs(final QueryResult.Series series, final Class clazz, final List result) { return parseSeriesAs(series, clazz, result, TimeUnit.MILLISECONDS); } @@ -237,7 +233,7 @@ List parseSeriesAs(final QueryResult.Series series, final Class clazz, List parseSeriesAs(final QueryResult.Series series, final Class clazz, final List result, final TimeUnit precision) { int columnSize = series.getColumns().size(); - ConcurrentMap colNameAndFieldMap = getColNameAndFieldMap(clazz); + ConcurrentMap colNameAndFieldMap = CLASS_FIELD_CACHE.get(clazz.getName()); try { T object = null; for (List row : series.getValues()) { @@ -324,7 +320,7 @@ boolean fieldValueModified(final Class fieldType, final Field field, fina if (value instanceof String) { instant = Instant.from(ISO8601_FORMATTER.parse(String.valueOf(value))); } else if (value instanceof Long) { - instant = Instant.ofEpochMilli(toMillis((Long) value, precision)); + instant = Instant.ofEpochMilli(toMillis((long) value, precision)); } else if (value instanceof Double) { instant = Instant.ofEpochMilli(toMillis(((Double) value).longValue(), precision)); } else if (value instanceof Integer) { @@ -380,7 +376,7 @@ boolean fieldValueForPrimitiveWrappersModified(final Class fieldType, fin return false; } - private Long toMillis(final Long value, final TimeUnit precision) { + private Long toMillis(final long value, final TimeUnit precision) { return TimeUnit.MILLISECONDS.convert(value, precision); } From 4a1f9291d8fcc0abe850fc61a632e2054631f553 Mon Sep 17 00:00:00 2001 From: bednar Date: Fri, 24 Aug 2018 15:00:07 +0200 Subject: [PATCH 238/745] Using the cloned Retrofit.Builder to avoid potential 
ConcurrentModificationException --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index aada0c75b..800badb1e 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -145,13 +145,13 @@ public InfluxDBImpl(final String url, final String username, final String passwo setLogLevel(LOG_LEVEL); this.gzipRequestInterceptor = new GzipRequestInterceptor(); - OkHttpClient.Builder clonedBuilder = okHttpBuilder.build().newBuilder(); - clonedBuilder.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor). + OkHttpClient.Builder clonedOkHttpBuilder = okHttpBuilder.build().newBuilder(); + clonedOkHttpBuilder.addInterceptor(loggingInterceptor).addInterceptor(gzipRequestInterceptor). addInterceptor(new BasicAuthInterceptor(username, password)); Factory converterFactory = null; switch (responseFormat) { case MSGPACK: - clonedBuilder.addInterceptor(chain -> { + clonedOkHttpBuilder.addInterceptor(chain -> { Request request = chain.request().newBuilder().addHeader("Accept", APPLICATION_MSGPACK).build(); return chain.proceed(request); }); @@ -169,8 +169,9 @@ public InfluxDBImpl(final String url, final String username, final String passwo break; } - this.retrofit = retrofitBuilder.baseUrl(url).client( - clonedBuilder.build()).addConverterFactory(converterFactory).build(); + Retrofit.Builder clonedRetrofitBuilder = retrofitBuilder.baseUrl(url).build().newBuilder(); + this.retrofit = clonedRetrofitBuilder.client(clonedOkHttpBuilder.build()) + .addConverterFactory(converterFactory).build(); this.influxDBService = this.retrofit.create(InfluxDBService.class); } From caa4767ea929cfc52e66a82831423d24e826aefd Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Mon, 27 Aug 2018 20:09:47 +0100 Subject: [PATCH 239/745] Added aliases, 
queries, clauses --- .../java/org/influxdb/querybuilder/Alias.java | 26 +++ .../org/influxdb/querybuilder/Appendable.java | 7 + .../org/influxdb/querybuilder/Appender.java | 92 ++++++++++ .../org/influxdb/querybuilder/BuiltQuery.java | 49 +++++ .../querybuilder/BuiltQueryDecorator.java | 32 ++++ .../org/influxdb/querybuilder/Column.java | 20 ++ .../org/influxdb/querybuilder/Distinct.java | 22 +++ .../org/influxdb/querybuilder/Function.java | 34 ++++ .../querybuilder/FunctionFactory.java | 39 ++++ .../org/influxdb/querybuilder/Operations.java | 14 ++ .../org/influxdb/querybuilder/Ordering.java | 23 +++ .../org/influxdb/querybuilder/RawString.java | 17 ++ .../org/influxdb/querybuilder/Select.java | 171 ++++++++++++++++++ .../org/influxdb/querybuilder/Selection.java | 112 ++++++++++++ .../querybuilder/clauses/AbstractClause.java | 11 ++ .../influxdb/querybuilder/clauses/Clause.java | 7 + .../querybuilder/clauses/CompoundClause.java | 38 ++++ .../querybuilder/clauses/ContainsClause.java | 9 + .../clauses/NegativeRegexClause.java | 25 +++ .../querybuilder/clauses/RegexClause.java | 25 +++ .../querybuilder/clauses/SimpleClause.java | 22 +++ 21 files changed, 795 insertions(+) create mode 100644 src/main/java/org/influxdb/querybuilder/Alias.java create mode 100644 src/main/java/org/influxdb/querybuilder/Appendable.java create mode 100644 src/main/java/org/influxdb/querybuilder/Appender.java create mode 100644 src/main/java/org/influxdb/querybuilder/BuiltQuery.java create mode 100644 src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java create mode 100644 src/main/java/org/influxdb/querybuilder/Column.java create mode 100644 src/main/java/org/influxdb/querybuilder/Distinct.java create mode 100644 src/main/java/org/influxdb/querybuilder/Function.java create mode 100644 src/main/java/org/influxdb/querybuilder/FunctionFactory.java create mode 100644 src/main/java/org/influxdb/querybuilder/Operations.java create mode 100644 
src/main/java/org/influxdb/querybuilder/Ordering.java create mode 100644 src/main/java/org/influxdb/querybuilder/RawString.java create mode 100644 src/main/java/org/influxdb/querybuilder/Select.java create mode 100644 src/main/java/org/influxdb/querybuilder/Selection.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/AbstractClause.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/Clause.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/CompoundClause.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/ContainsClause.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/NegativeRegexClause.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/RegexClause.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/SimpleClause.java diff --git a/src/main/java/org/influxdb/querybuilder/Alias.java b/src/main/java/org/influxdb/querybuilder/Alias.java new file mode 100644 index 000000000..d442c9b2f --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/Alias.java @@ -0,0 +1,26 @@ +package org.influxdb.querybuilder; + +public class Alias { + + private final Object column; + private final String alias; + + Alias(Object column, String alias) { + this.column = column; + this.alias = alias; + } + + public Object getColumn() { + return column; + } + + public String getAlias() { + return alias; + } + + @Override + public String toString() { + return String.format("%s AS %s", column, alias); + } + +} diff --git a/src/main/java/org/influxdb/querybuilder/Appendable.java b/src/main/java/org/influxdb/querybuilder/Appendable.java new file mode 100644 index 000000000..1eaafa417 --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/Appendable.java @@ -0,0 +1,7 @@ +package org.influxdb.querybuilder; + +public interface Appendable { + + void appendTo(StringBuilder sb); + +} diff --git a/src/main/java/org/influxdb/querybuilder/Appender.java 
b/src/main/java/org/influxdb/querybuilder/Appender.java new file mode 100644 index 000000000..19762f6bf --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/Appender.java @@ -0,0 +1,92 @@ +package org.influxdb.querybuilder; + +import java.util.List; + +public class Appender { + + public static StringBuilder joinAndAppend(StringBuilder stringBuilder, String separator, List values) { + for (int i = 0; i < values.size(); i++) { + if (i > 0) + stringBuilder.append(separator); + values.get(i).appendTo(stringBuilder); + } + return stringBuilder; + } + + public static StringBuilder joinAndAppendNames(StringBuilder stringBuilder, List values) { + for (int i = 0; i < values.size(); i++) { + if (i > 0) + stringBuilder.append(","); + appendName(values.get(i), stringBuilder); + } + return stringBuilder; + } + + public static StringBuilder appendValue(Object value, StringBuilder stringBuilder) { + if (value == null) { + stringBuilder.append("null"); + } else if (value instanceof Function) { + Function fcall = (Function) value; + stringBuilder.append(fcall.getName()).append('('); + for (int i = 0; i < fcall.getParameters().length; i++) { + if (i > 0) + stringBuilder.append(','); + appendValue(fcall.getParameters()[i], stringBuilder); + } + stringBuilder.append(')'); + } else if (value instanceof Column) { + appendName(((Column) value).getName(), stringBuilder); + } else if (value instanceof RawString) { + stringBuilder.append(value.toString()); + } else if (value instanceof String) { + stringBuilder.append("'").append(value).append("'"); + } else if (value != null) { + stringBuilder.append(value); + } else { + stringBuilder.append('?'); + return stringBuilder; + } + return stringBuilder; + } + + public static StringBuilder appendName(String name, StringBuilder stringBuilder) { + name = name.trim(); + if (name.startsWith("\"")) { + stringBuilder.append(name); + } else { + stringBuilder.append('"').append(name).append('"'); + } + return stringBuilder; + } + + public static 
StringBuilder appendName(Object name, StringBuilder stringBuilder) { + if (name instanceof String) { + appendName((String) name, stringBuilder); + } else if (name instanceof Column) { + appendName(((Column) name).getName(), stringBuilder); + } else if (name instanceof Function) { + Function functionCall = (Function) name; + stringBuilder.append(functionCall.getName()).append('('); + for (int i = 0; i < functionCall.getParameters().length; i++) { + if (i > 0) + stringBuilder.append(','); + appendValue(functionCall.getParameters()[i], stringBuilder); + } + stringBuilder.append(')'); + } else if (name instanceof Alias) { + Alias alias = (Alias) name; + appendName(alias.getColumn(), stringBuilder); + stringBuilder.append(" AS ").append(alias.getAlias()); + } else if (name instanceof RawString) { + stringBuilder.append(name); + } else if (name instanceof Distinct) { + Distinct distinct = (Distinct) name; + stringBuilder.append("DISTINCT "); + appendName(distinct.getExpression(), stringBuilder); + } else { + throw new IllegalArgumentException(String.format("Invalid column %s of type unknown of the query builder", name)); + } + return stringBuilder; + } + +} \ No newline at end of file diff --git a/src/main/java/org/influxdb/querybuilder/BuiltQuery.java b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java new file mode 100644 index 000000000..06e16ccdf --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java @@ -0,0 +1,49 @@ +package org.influxdb.querybuilder; + +import org.influxdb.dto.Query; + +public abstract class BuiltQuery extends Query { + + public BuiltQuery(String database) { + super(null, database); + } + + public BuiltQuery(String database, boolean requiresPost) { + super(null, database, requiresPost); + } + + abstract StringBuilder buildQueryString(); + + static StringBuilder addSemicolonIfNeeded(StringBuilder stringBuilder) { + int length = moveToEndOfText(stringBuilder); + if (length == 0 || stringBuilder.charAt(length - 1) != ';') + 
stringBuilder.append(';'); + return stringBuilder; + } + + private static int moveToEndOfText(StringBuilder stringBuilder) { + int length = stringBuilder.length(); + while (length > 0 && stringBuilder.charAt(length - 1) <= ' ') + length -= 1; + if (length != stringBuilder.length()) + stringBuilder.setLength(length); + return length; + } + + @Override + public String getCommand() { + StringBuilder sb = buildQueryString(); + addSemicolonIfNeeded(sb); + return sb.toString(); + } + + @Override + public String getCommandWithUrlEncoded() { + return encode(getCommand()); + } + + @Override + public String toString() { + return getCommandWithUrlEncoded(); + } +} diff --git a/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java b/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java new file mode 100644 index 000000000..e05a5ff64 --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java @@ -0,0 +1,32 @@ +package org.influxdb.querybuilder; + +public class BuiltQueryDecorator extends BuiltQuery { + + T query; + + BuiltQueryDecorator(T query) { + super(null); + this.query = query; + } + + @Override + public String getCommand() { + return query.getCommand(); + } + + @Override + StringBuilder buildQueryString() { + return query.buildQueryString(); + } + + @Override + public String getDatabase() { + return query.getDatabase(); + } + + @Override + public String toString() { + return query.toString(); + } + +} diff --git a/src/main/java/org/influxdb/querybuilder/Column.java b/src/main/java/org/influxdb/querybuilder/Column.java new file mode 100644 index 000000000..27d7869ea --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/Column.java @@ -0,0 +1,20 @@ +package org.influxdb.querybuilder; + +public class Column { + + private final String name; + + Column(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + @Override + public String toString() { + return name; + } + +} diff --git 
a/src/main/java/org/influxdb/querybuilder/Distinct.java b/src/main/java/org/influxdb/querybuilder/Distinct.java new file mode 100644 index 000000000..cd8b8676e --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/Distinct.java @@ -0,0 +1,22 @@ +package org.influxdb.querybuilder; + +public class Distinct { + + /** + * Distinct might as well contain an expression + */ + private final Object expression; + + Distinct(Object expression) { + this.expression = expression; + } + + public Object getExpression() { + return expression; + } + + @Override + public String toString() { + return String.format("DISTINCT %s", expression); + } +} diff --git a/src/main/java/org/influxdb/querybuilder/Function.java b/src/main/java/org/influxdb/querybuilder/Function.java new file mode 100644 index 000000000..58090ea57 --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/Function.java @@ -0,0 +1,34 @@ +package org.influxdb.querybuilder; + +public class Function { + + private final String name; + private final Object[] parameters; + + Function(String name, Object... 
parameters) { + this.name = name; + this.parameters = parameters; + } + + public String getName() { + return name; + } + + public Object[] getParameters() { + return parameters; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(name).append('('); + for (int i = 0; i < parameters.length; i++) { + if (i > 0) + sb.append(','); + sb.append(parameters[i]); + } + sb.append(')'); + return sb.toString(); + } + +} diff --git a/src/main/java/org/influxdb/querybuilder/FunctionFactory.java b/src/main/java/org/influxdb/querybuilder/FunctionFactory.java new file mode 100644 index 000000000..6227d2928 --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/FunctionFactory.java @@ -0,0 +1,39 @@ +package org.influxdb.querybuilder; + +public class FunctionFactory { + + public static Object count(Object column) { + if (column instanceof String) + column = column(((String) column)); + return new Function("COUNT", column); + } + + public static Object max(Object column) { + if (column instanceof String) + column = column(((String) column)); + return new Function("MAX", column); + } + + public static Object min(Object column) { + if (column instanceof String) + column = column(((String) column)); + return new Function("MIN", column); + } + + public static Object sum(Object column) { + if (column instanceof String) + column = column(((String) column)); + return new Function("SUM", column); + } + + public static Object mean(Object column) { + if (column instanceof String) + column = column(((String) column)); + return new Function("MEAN", column); + } + + private static Object column(String name) { + return new Column(name); + } + +} diff --git a/src/main/java/org/influxdb/querybuilder/Operations.java b/src/main/java/org/influxdb/querybuilder/Operations.java new file mode 100644 index 000000000..edb946a08 --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/Operations.java @@ -0,0 +1,14 @@ +package org.influxdb.querybuilder; + 
+public class Operations { + + public static final String EQ = "="; + public static final String NE = "!="; + public static final String LT = "<"; + public static final String LTE = "<="; + public static final String GT = ">"; + public static final String GTE = ">="; + public static final String EQR = "=~"; + public static final String NER = "~!"; + +} diff --git a/src/main/java/org/influxdb/querybuilder/Ordering.java b/src/main/java/org/influxdb/querybuilder/Ordering.java new file mode 100644 index 000000000..a60bb378c --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/Ordering.java @@ -0,0 +1,23 @@ +package org.influxdb.querybuilder; + +public class Ordering implements Appendable { + + private final boolean isDesc; + + private static final String TIME_KEY = "time"; + + /** + * Influxdb ordering currently supports only time + * @param isDesc + */ + Ordering(boolean isDesc) { + this.isDesc = isDesc; + } + + @Override + public void appendTo(StringBuilder sb) { + Appender.appendName(TIME_KEY, sb); + sb.append(isDesc ? 
" DESC" : " ASC"); + } + +} \ No newline at end of file diff --git a/src/main/java/org/influxdb/querybuilder/RawString.java b/src/main/java/org/influxdb/querybuilder/RawString.java new file mode 100644 index 000000000..b8499ec6e --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/RawString.java @@ -0,0 +1,17 @@ +package org.influxdb.querybuilder; + +public class RawString { + + private final String str; + + public RawString(String str) { + this.str = str; + } + + @Override + public String toString() { + return str; + } + +} + diff --git a/src/main/java/org/influxdb/querybuilder/Select.java b/src/main/java/org/influxdb/querybuilder/Select.java new file mode 100644 index 000000000..fd74e60d1 --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/Select.java @@ -0,0 +1,171 @@ +package org.influxdb.querybuilder; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.influxdb.querybuilder.clauses.Clause; + +public class Select extends BuiltQuery { + + + private final String table; + private final boolean isDistinct; + private final List columns; + private final Where where; + private Ordering ordering; + private List groupByColumns; + private Integer limit; + private Long offSet; + + Select(String database, + String table, + List columns, + boolean isDistinct) { + super(database); + this.table = table; + this.columns = columns; + this.isDistinct = isDistinct; + this.where = new Where(this); + } + + @Override + StringBuilder buildQueryString() { + StringBuilder builder = new StringBuilder(); + + builder.append("SELECT "); + + if (isDistinct) + if (columns.size() > 1) { + throw new IllegalStateException("DISTINCT function can only be used with one column"); + } + + if (columns == null) { + builder.append('*'); + } else { + Appender.joinAndAppendNames(builder, columns); + } + builder.append(" FROM "); + + Appender.appendName(table, builder); + + if (!where.clauses.isEmpty()) { + 
builder.append(" WHERE "); + Appender.joinAndAppend(builder, " AND ", where.clauses); + } + + if (groupByColumns != null) { + builder.append(" GROUP BY "); + Appender.joinAndAppendNames(builder, groupByColumns); + } + + if (ordering != null) { + builder.append(" ORDER BY "); + Appender.joinAndAppend(builder, ",", Collections.singletonList(ordering)); + } + + if (limit != null) { + builder.append(" LIMIT ").append(limit); + } + + if (offSet != null) { + builder.append(" OFFSET ").append(offSet); + } + + return builder; + } + + + public Where where(Clause clause) { + return where.and(clause); + } + + public Where where() { + return where; + } + + public Select orderBy(Ordering ordering) { + + this.ordering = ordering; + return this; + } + + public Select groupBy(Object... columns) { + this.groupByColumns = Arrays.asList(columns); + return this; + } + + public Select limit(int limit) { + if (limit <= 0) + throw new IllegalArgumentException("Invalid LIMIT value, must be strictly positive"); + + if (this.limit != null) + throw new IllegalStateException("A LIMIT value has already been provided"); + + this.limit = limit; + return this; + } + + public Select limit(int limit, long offSet) { + if (limit <= 0|| offSet<=0) + throw new IllegalArgumentException("Invalid LIMIT and OFFSET Value, must be strictly positive"); + + this.limit = limit; + this.offSet = offSet; + return this; + } + + public static class Where extends BuiltQueryDecorator { + + private final List clauses = new ArrayList(); + + Where(Select statement) { + super(statement); } - public Select orderBy(Ordering ordering) { + public Where and(Clause clause) { + clauses.add(clause); + return this; + } - this.ordering = ordering; - return this; + public Select orderBy(Ordering orderings) { + return query.orderBy(orderings); } public Select groupBy(Object... 
columns) { - this.groupByColumns = Arrays.asList(columns); - return this; + return query.groupBy(columns); } public Select limit(int limit) { - if (limit <= 0) - throw new IllegalArgumentException("Invalid LIMIT value, must be strictly positive"); - - if (this.limit != null) - throw new IllegalStateException("A LIMIT value has already been provided"); - - this.limit = limit; - return this; + return query.limit(limit); } public Select limit(int limit, long offSet) { - if (limit <= 0|| offSet<=0) - throw new IllegalArgumentException("Invalid LIMIT and OFFSET Value, must be strictly positive"); - - this.limit = limit; - this.offSet = offSet; - return this; + return query.limit(limit, offSet); } + } - public static class Where extends BuiltQueryDecorator { private final List clauses = new ArrayList(); - Where(Select statement) { + Where(final Select statement) { super(statement); } - public Where and(Clause clause) { + public Where and(final Clause clause) { clauses.add(clause); return this; } - public Select orderBy(Ordering orderings) { + public Select orderBy(final Ordering orderings) { return query.orderBy(orderings); } - public Select groupBy(Object... columns) { + public Select groupBy(final Object... 
columns) { return query.groupBy(columns); } - public Select limit(int limit) { + public Select limit(final int limit) { return query.limit(limit); } - public Select limit(int limit, long offSet) { + public Select limit(final int limit, final long offSet) { return query.limit(limit, offSet); } } @@ -157,9 +161,10 @@ public static class Builder { protected boolean requiresPost; protected boolean isDistinct; - Builder() {} + Builder() { + } - public Builder(List columns) { + public Builder(final List columns) { this.columns = columns; } @@ -168,11 +173,11 @@ public Builder requiresPost() { return this; } - public Select from(String table) { + public Select from(final String table) { return from(null, table); } - public Select from(String database, String table) { + public Select from(final String database, final String table) { return new Select(database, table, columns, isDistinct, requiresPost); } } diff --git a/src/main/java/org/influxdb/querybuilder/Selection.java b/src/main/java/org/influxdb/querybuilder/Selection.java index 8e3585dad..324c39686 100644 --- a/src/main/java/org/influxdb/querybuilder/Selection.java +++ b/src/main/java/org/influxdb/querybuilder/Selection.java @@ -7,7 +7,7 @@ public class Selection extends Select.Builder { private static final List COUNT_ALL = - Collections.singletonList(new Function("COUNT", new RawString("*"))); + Collections.singletonList(new Function("COUNT", new RawText("*"))); private Object currentSelection; @@ -24,7 +24,7 @@ public Selection requiresPost() { return this; } - public Selection as(String aliasName) { + public Selection as(final String aliasName) { assertColumnIsSelected(); Object alias = new Alias(currentSelection, aliasName); currentSelection = null; @@ -37,82 +37,94 @@ private void assertColumnIsSelected() { } } - private Selection moveToColumns(Object name) { - if (columns == null) columns = new ArrayList<>(); + private Selection moveToColumns(final Object name) { + if (columns == null) { + columns = new 
ArrayList<>(); + } columns.add(name); return this; } - private Selection addToCurrentColumn(Object name) { - if (currentSelection != null) moveToColumns(currentSelection); + private Selection addToCurrentColumn(final Object name) { + if (currentSelection != null) { + moveToColumns(currentSelection); + } currentSelection = name; return this; } public Select.Builder all() { - if (isDistinct) + if (isDistinct) { throw new IllegalStateException("DISTINCT function can only be used with one column"); - if (columns != null) + } + if (columns != null) { throw new IllegalStateException("Can't select all columns over columns selected previously"); - if (currentSelection != null) + } + if (currentSelection != null) { throw new IllegalStateException("Can't select all columns over columns selected previously"); + } return this; } public Select.Builder countAll() { - if (columns != null) + if (columns != null) { throw new IllegalStateException("Can't select all columns over columns selected previously"); - if (currentSelection != null) + } + if (currentSelection != null) { throw new IllegalStateException("Can't select all columns over columns selected previously"); - + } columns = COUNT_ALL; return this; } - public Selection column(String name) { + public Selection column(final String name) { return addToCurrentColumn(name); } - public Selection function(String name, Object... parameters) { + public Selection function(final String name, final Object... 
parameters) { return addToCurrentColumn(FunctionFactory.function(name, parameters)); } - public Selection raw(String rawString) { - return addToCurrentColumn(new RawString(rawString)); + public Selection raw(final String text) { + return addToCurrentColumn(new RawText(text)); } - public Selection count(Object column) { + public Selection count(final Object column) { return addToCurrentColumn(FunctionFactory.count(column)); } - public Selection max(Object column) { + public Selection max(final Object column) { return addToCurrentColumn(FunctionFactory.max(column)); } - public Selection min(Object column) { + public Selection min(final Object column) { return addToCurrentColumn(FunctionFactory.min(column)); } - public Selection sum(Object column) { + public Selection sum(final Object column) { return addToCurrentColumn(FunctionFactory.sum(column)); } - public Selection mean(Object column) { + public Selection mean(final Object column) { return addToCurrentColumn(FunctionFactory.mean(column)); } @Override - public Select from(String keyspace, String table) { - if (currentSelection != null) moveToColumns(currentSelection); + public Select from(final String keyspace, final String table) { + if (currentSelection != null) { + moveToColumns(currentSelection); + } currentSelection = null; return super.from(keyspace, table); } @Override - public Select from(String table) { - if (currentSelection != null) moveToColumns(currentSelection); + public Select from(final String table) { + if (currentSelection != null) { + moveToColumns(currentSelection); + } currentSelection = null; return super.from(table); } diff --git a/src/main/java/org/influxdb/querybuilder/clauses/AbstractClause.java b/src/main/java/org/influxdb/querybuilder/clauses/AbstractClause.java index f1c2ec002..473618e0a 100644 --- a/src/main/java/org/influxdb/querybuilder/clauses/AbstractClause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/AbstractClause.java @@ -4,7 +4,7 @@ public abstract class 
AbstractClause implements Clause { final String name; - AbstractClause(String name) { + AbstractClause(final String name) { this.name = name; } } diff --git a/src/main/java/org/influxdb/querybuilder/clauses/Clause.java b/src/main/java/org/influxdb/querybuilder/clauses/Clause.java index 90014c3ff..4e4820a7d 100644 --- a/src/main/java/org/influxdb/querybuilder/clauses/Clause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/Clause.java @@ -2,4 +2,5 @@ import org.influxdb.querybuilder.Appendable; -public interface Clause extends Appendable {} +public interface Clause extends Appendable { +} diff --git a/src/main/java/org/influxdb/querybuilder/clauses/ContainsClause.java b/src/main/java/org/influxdb/querybuilder/clauses/ContainsClause.java index 07ec6b3f9..b0a9ccee6 100644 --- a/src/main/java/org/influxdb/querybuilder/clauses/ContainsClause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/ContainsClause.java @@ -2,7 +2,7 @@ public class ContainsClause extends RegexClause { - public ContainsClause(String name, String value) { + public ContainsClause(final String name, final String value) { super(name, "/" + value + "/"); } } diff --git a/src/main/java/org/influxdb/querybuilder/clauses/NegativeRegexClause.java b/src/main/java/org/influxdb/querybuilder/clauses/NegativeRegexClause.java index 1059a8d47..0f472b330 100644 --- a/src/main/java/org/influxdb/querybuilder/clauses/NegativeRegexClause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/NegativeRegexClause.java @@ -2,22 +2,24 @@ import org.influxdb.querybuilder.Appender; import org.influxdb.querybuilder.Operations; -import org.influxdb.querybuilder.RawString; +import org.influxdb.querybuilder.RawText; public class NegativeRegexClause extends AbstractClause { - private final RawString value; + private final RawText value; - public NegativeRegexClause(String name, String value) { + public NegativeRegexClause(final String name, final String value) { super(name); - this.value = new 
RawString(value); + this.value = new RawText(value); - if (value == null) throw new IllegalArgumentException("Missing value for regex clause"); + if (value == null) { + throw new IllegalArgumentException("Missing value for regex clause"); + } } @Override - public void appendTo(StringBuilder sb) { - Appender.appendName(name, sb).append(" ").append(Operations.NER).append(" "); - Appender.appendValue(value, sb); + public void appendTo(final StringBuilder stringBuilder) { + Appender.appendName(name, stringBuilder).append(" ").append(Operations.NER).append(" "); + Appender.appendValue(value, stringBuilder); } } diff --git a/src/main/java/org/influxdb/querybuilder/clauses/RawTextClause.java b/src/main/java/org/influxdb/querybuilder/clauses/RawTextClause.java index 02f8edabe..4c067474b 100644 --- a/src/main/java/org/influxdb/querybuilder/clauses/RawTextClause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/RawTextClause.java @@ -1,15 +1,15 @@ package org.influxdb.querybuilder.clauses; import org.influxdb.querybuilder.Appender; -import org.influxdb.querybuilder.RawString; +import org.influxdb.querybuilder.RawText; public class RawTextClause extends AbstractClause { - private final RawString value; + private final RawText value; - public RawTextClause(String text) { + public RawTextClause(final String text) { super(""); - this.value = new RawString(text); + this.value = new RawText(text); if (text == null) { throw new IllegalArgumentException("Missing text for expression"); @@ -17,7 +17,7 @@ public RawTextClause(String text) { } @Override - public void appendTo(StringBuilder sb) { - Appender.appendValue(value, sb); + public void appendTo(final StringBuilder stringBuilder) { + Appender.appendValue(value, stringBuilder); } } diff --git a/src/main/java/org/influxdb/querybuilder/clauses/RegexClause.java b/src/main/java/org/influxdb/querybuilder/clauses/RegexClause.java index 645d06885..9ccdc8bd4 100644 --- 
a/src/main/java/org/influxdb/querybuilder/clauses/RegexClause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/RegexClause.java @@ -2,22 +2,24 @@ import org.influxdb.querybuilder.Appender; import org.influxdb.querybuilder.Operations; -import org.influxdb.querybuilder.RawString; +import org.influxdb.querybuilder.RawText; public class RegexClause extends AbstractClause { - private final RawString value; + private final RawText value; - public RegexClause(String name, String value) { + public RegexClause(final String name, final String value) { super(name); - this.value = new RawString(value); + this.value = new RawText(value); - if (value == null) throw new IllegalArgumentException("Missing value for regex clause"); + if (value == null) { + throw new IllegalArgumentException("Missing value for regex clause"); + } } @Override - public void appendTo(StringBuilder sb) { - Appender.appendName(name, sb).append(" ").append(Operations.EQR).append(" "); - Appender.appendValue(value, sb); + public void appendTo(final StringBuilder stringBuilder) { + Appender.appendName(name, stringBuilder).append(" ").append(Operations.EQR).append(" "); + Appender.appendValue(value, stringBuilder); } } diff --git a/src/main/java/org/influxdb/querybuilder/clauses/SimpleClause.java b/src/main/java/org/influxdb/querybuilder/clauses/SimpleClause.java index 628f7a09b..d8e5056ba 100644 --- a/src/main/java/org/influxdb/querybuilder/clauses/SimpleClause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/SimpleClause.java @@ -7,14 +7,14 @@ public class SimpleClause extends AbstractClause { private final String op; private final Object value; - public SimpleClause(String name, String op, Object value) { + public SimpleClause(final String name, final String op, final Object value) { super(name); this.op = op; this.value = value; } @Override - public void appendTo(StringBuilder sb) { + public void appendTo(final StringBuilder sb) { Appender.appendName(name, sb).append(op); 
Appender.appendValue(value, sb); } From d01b6001710b927fe3deeaed9ccb1cc98abc7a85 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 09:48:44 +0100 Subject: [PATCH 264/745] Renamed functions --- src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 975bb798b..741301d52 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -333,7 +333,7 @@ public void testFunctionInsideFunction() { } @Test - public void testRawStringOnSelection() { + public void testRawTextOnSelection() { Query query = new Query("SELECT an expression on select FROM foobar LIMIT 1 OFFSET 20;", DATABASE); Query select = select().raw("an expression on select").from(DATABASE, "foobar").limit(1, 20); @@ -343,7 +343,7 @@ public void testRawStringOnSelection() { } @Test - public void testRawStringOnCondition() { + public void testRawTextOnCondition() { Query query = new Query("SELECT * FROM foobar WHERE text as condition LIMIT 1 OFFSET 20;", DATABASE); Query select = select().from(DATABASE, "foobar").where("text as condition").limit(1, 20); From 6ee3a9f1f63802c9773cd3d4dec982fac63873fb Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 21:12:36 +0100 Subject: [PATCH 265/745] Applied formatting --- .../org/influxdb/querybuilder/Appender.java | 7 ++++-- .../org/influxdb/querybuilder/BuiltQuery.java | 22 +++++++++---------- .../querybuilder/FunctionFactory.java | 3 +-- .../org/influxdb/querybuilder/Ordering.java | 1 + .../org/influxdb/querybuilder/Select.java | 2 +- .../querybuilder/clauses/RegexClause.java | 2 +- 6 files changed, 20 insertions(+), 17 deletions(-) diff --git a/src/main/java/org/influxdb/querybuilder/Appender.java b/src/main/java/org/influxdb/querybuilder/Appender.java index 
a2235e2f2..92b8fe5e4 100644 --- a/src/main/java/org/influxdb/querybuilder/Appender.java +++ b/src/main/java/org/influxdb/querybuilder/Appender.java @@ -11,7 +11,9 @@ private Appender() { } public static StringBuilder joinAndAppend( - final StringBuilder stringBuilder, final String separator, final List values) { + final StringBuilder stringBuilder, + final String separator, + final List values) { for (int i = 0; i < values.size(); i++) { if (i > 0) { stringBuilder.append(separator); @@ -21,7 +23,8 @@ public static StringBuilder joinAndAppend( return stringBuilder; } - public static StringBuilder joinAndAppendNames(final StringBuilder stringBuilder, final List values) { + public static StringBuilder joinAndAppendNames( + final StringBuilder stringBuilder, final List values) { for (int i = 0; i < values.size(); i++) { if (i > 0) { stringBuilder.append(","); diff --git a/src/main/java/org/influxdb/querybuilder/BuiltQuery.java b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java index 135fe4de0..2d68d1312 100644 --- a/src/main/java/org/influxdb/querybuilder/BuiltQuery.java +++ b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java @@ -1,20 +1,19 @@ package org.influxdb.querybuilder; +import static org.influxdb.querybuilder.Operations.EQ; +import static org.influxdb.querybuilder.Operations.GT; +import static org.influxdb.querybuilder.Operations.GTE; +import static org.influxdb.querybuilder.Operations.LT; +import static org.influxdb.querybuilder.Operations.LTE; +import static org.influxdb.querybuilder.Operations.NE; + import java.util.Arrays; import org.influxdb.dto.Query; import org.influxdb.querybuilder.clauses.Clause; import org.influxdb.querybuilder.clauses.ContainsClause; -import org.influxdb.querybuilder.clauses.SimpleClause; -import org.influxdb.querybuilder.clauses.RegexClause; import org.influxdb.querybuilder.clauses.NegativeRegexClause; - -import static org.influxdb.querybuilder.Operations.EQ; -import static org.influxdb.querybuilder.Operations.LT; 
-import static org.influxdb.querybuilder.Operations.NE; -import static org.influxdb.querybuilder.Operations.LTE; -import static org.influxdb.querybuilder.Operations.GT; -import static org.influxdb.querybuilder.Operations.GTE; - +import org.influxdb.querybuilder.clauses.RegexClause; +import org.influxdb.querybuilder.clauses.SimpleClause; public abstract class BuiltQuery extends Query { @@ -60,7 +59,8 @@ public String getCommandWithUrlEncoded() { } /** - * The query builder shall provide all the building blocks needed, only a static block shall be used. + * The query builder shall provide all the building blocks needed, only a static block shall be + * used. */ public static final class QueryBuilder { diff --git a/src/main/java/org/influxdb/querybuilder/FunctionFactory.java b/src/main/java/org/influxdb/querybuilder/FunctionFactory.java index a5cc26c21..1af25c06c 100644 --- a/src/main/java/org/influxdb/querybuilder/FunctionFactory.java +++ b/src/main/java/org/influxdb/querybuilder/FunctionFactory.java @@ -2,14 +2,13 @@ import static org.influxdb.querybuilder.Aggregations.COUNT; import static org.influxdb.querybuilder.Aggregations.MAX; +import static org.influxdb.querybuilder.Aggregations.MEAN; import static org.influxdb.querybuilder.Aggregations.MIN; import static org.influxdb.querybuilder.Aggregations.SUM; -import static org.influxdb.querybuilder.Aggregations.MEAN; public final class FunctionFactory { private FunctionFactory() { - } public static Object function(final String name, final Object... parameters) { diff --git a/src/main/java/org/influxdb/querybuilder/Ordering.java b/src/main/java/org/influxdb/querybuilder/Ordering.java index 5546a47f3..a90176852 100644 --- a/src/main/java/org/influxdb/querybuilder/Ordering.java +++ b/src/main/java/org/influxdb/querybuilder/Ordering.java @@ -8,6 +8,7 @@ public class Ordering implements Appendable { /** * Influxdb ordering currently supports only time. 
+ * * @param isDesc */ public Ordering(final boolean isDesc) { diff --git a/src/main/java/org/influxdb/querybuilder/Select.java b/src/main/java/org/influxdb/querybuilder/Select.java index b55c20434..629a7ebdc 100644 --- a/src/main/java/org/influxdb/querybuilder/Select.java +++ b/src/main/java/org/influxdb/querybuilder/Select.java @@ -117,7 +117,7 @@ public Select limit(final int limit) { public Select limit(final int limit, final long offSet) { if (limit <= 0 || offSet <= 0) { throw new IllegalArgumentException( - "Invalid LIMIT and OFFSET Value, must be strictly positive"); + "Invalid LIMIT and OFFSET Value, must be strictly positive"); } this.limit = limit; diff --git a/src/main/java/org/influxdb/querybuilder/clauses/RegexClause.java b/src/main/java/org/influxdb/querybuilder/clauses/RegexClause.java index 9ccdc8bd4..c20d1a429 100644 --- a/src/main/java/org/influxdb/querybuilder/clauses/RegexClause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/RegexClause.java @@ -13,7 +13,7 @@ public RegexClause(final String name, final String value) { this.value = new RawText(value); if (value == null) { - throw new IllegalArgumentException("Missing value for regex clause"); + throw new IllegalArgumentException("Missing value for regex clause"); } } From e42095568303b95c67badefc7337d3f99c7aea3a Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 22:10:42 +0100 Subject: [PATCH 266/745] Added examples on readme --- README.md | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/README.md b/README.md index de3890308..3c7a9cae2 100644 --- a/README.md +++ b/README.md @@ -250,6 +250,38 @@ List cpuList = resultMapper.toPOJO(queryResult, Cpu.class); - A Class field annotated with _@Column(..., tag = true)_ (i.e. a [InfluxDB Tag](https://docs.influxdata.com/influxdb/v1.2/concepts/glossary/#tag-value)) must be declared as _String_. 
-- _Note: With the current released version (2.7), InfluxDBResultMapper does not support QueryResult created by queries using the "GROUP BY" clause. This was fixed by [PR #345](https://github.com/influxdata/influxdb-java/pull/345)._ +#### QueryBuilder: +An alternative way to create InfluxDB queries is not available. + +Supposing that you have a measurement _CPU_: +``` +> INSERT cpu,host=serverA,region=us_west idle=0.64,happydevop=false,uptimesecs=123456789i +> +> select * from cpu +name: cpu +time happydevop host idle region uptimesecs +---- ---------- ---- ---- ------ ---------- +2017-06-20T15:32:46.202829088Z false serverA 0.64 us_west 123456789 +``` + +Create query that selects all fields with a limit + +```java +> Query select = select().from(dbName, "cpu").groupBy("host", "region").limit(1); +``` + +Create query with aggregations + +```java +> Query select = select().max("idle").sum("uptimesecs").from(dbName, "cpu"); +``` + +Use your expressions using raw text + +```java +> Query select = select().raw("an expression on select").from(dbName, "cpu").where("an expression as condition"); +``` + #### Query using Callbacks (version 2.8+ required) influxdb-java now supports returning results of a query via callbacks. Only one From e72ef01e29684fbaeed957357489911f5d9c951e Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 22:11:56 +0100 Subject: [PATCH 267/745] Changed typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3c7a9cae2..f4962a27d 100644 --- a/README.md +++ b/README.md @@ -251,7 +251,7 @@ List cpuList = resultMapper.toPOJO(queryResult, Cpu.class); -- _Note: With the current released version (2.7), InfluxDBResultMapper does not support QueryResult created by queries using the "GROUP BY" clause. This was fixed by [PR #345](https://github.com/influxdata/influxdb-java/pull/345)._ #### QueryBuilder: -An alternative way to create InfluxDB queries is not available. 
+An alternative way to create InfluxDB queries is available. Supposing that you have a measurement _CPU_: ``` From 55df35b9fd7cb6b4c1ceed5b45e41b38f5d839b4 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 22:15:20 +0100 Subject: [PATCH 268/745] Change raw text documentation --- README.md | 2 +- src/main/java/org/influxdb/dto/Query.java | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index f4962a27d..b170f5172 100644 --- a/README.md +++ b/README.md @@ -276,7 +276,7 @@ Create query with aggregations > Query select = select().max("idle").sum("uptimesecs").from(dbName, "cpu"); ``` -Use your expressions using raw text +Use expressions using raw text ```java > Query select = select().raw("an expression on select").from(dbName, "cpu").where("an expression as condition"); diff --git a/src/main/java/org/influxdb/dto/Query.java b/src/main/java/org/influxdb/dto/Query.java index 328470ef2..503b120a8 100644 --- a/src/main/java/org/influxdb/dto/Query.java +++ b/src/main/java/org/influxdb/dto/Query.java @@ -108,5 +108,4 @@ public static String encode(final String command) { throw new IllegalStateException("Every JRE must support UTF-8", e); } } - } From 5a0c0613e80a878fd95fdfbcbfd466b88c69882a Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 22:17:12 +0100 Subject: [PATCH 269/745] Changed to 'Create queries using raw text' --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b170f5172..4a2f45c9a 100644 --- a/README.md +++ b/README.md @@ -276,7 +276,7 @@ Create query with aggregations > Query select = select().max("idle").sum("uptimesecs").from(dbName, "cpu"); ``` -Use expressions using raw text +Create queries using raw text ```java > Query select = select().raw("an expression on select").from(dbName, "cpu").where("an expression as condition"); From c9df0eb93c7177fbcb3c603b3d85a6bd12ab2441 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 
22:43:01 +0100 Subject: [PATCH 270/745] Removed to string --- src/main/java/org/influxdb/querybuilder/Column.java | 4 ---- src/main/java/org/influxdb/querybuilder/Distinct.java | 4 ---- 2 files changed, 8 deletions(-) diff --git a/src/main/java/org/influxdb/querybuilder/Column.java b/src/main/java/org/influxdb/querybuilder/Column.java index cac0f70a5..f9185ce7e 100644 --- a/src/main/java/org/influxdb/querybuilder/Column.java +++ b/src/main/java/org/influxdb/querybuilder/Column.java @@ -12,8 +12,4 @@ public String getName() { return name; } - @Override - public String toString() { - return name; - } } diff --git a/src/main/java/org/influxdb/querybuilder/Distinct.java b/src/main/java/org/influxdb/querybuilder/Distinct.java index 9521927b3..81d1b6508 100644 --- a/src/main/java/org/influxdb/querybuilder/Distinct.java +++ b/src/main/java/org/influxdb/querybuilder/Distinct.java @@ -13,8 +13,4 @@ public Object getExpression() { return expression; } - @Override - public String toString() { - return String.format("DISTINCT %s", expression); - } } From 6ea87e0be93b1bb8cab26ab43b141296885f8223 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 22:58:36 +0100 Subject: [PATCH 271/745] Added test for exception on raw expression --- .../java/org/influxdb/querybuilder/Appender.java | 8 ++++---- .../influxdb/querybuilder/BuiltQueryDecorator.java | 4 ---- .../java/org/influxdb/querybuilder/Function.java | 13 ------------- .../org/influxdb/querybuilder/BuiltQueryTest.java | 6 ++++++ 4 files changed, 10 insertions(+), 21 deletions(-) diff --git a/src/main/java/org/influxdb/querybuilder/Appender.java b/src/main/java/org/influxdb/querybuilder/Appender.java index 92b8fe5e4..d2347057e 100644 --- a/src/main/java/org/influxdb/querybuilder/Appender.java +++ b/src/main/java/org/influxdb/querybuilder/Appender.java @@ -38,13 +38,13 @@ public static StringBuilder appendValue(final Object value, final StringBuilder if (value == null) { stringBuilder.append("null"); } else if (value 
instanceof Function) { - Function fcall = (Function) value; - stringBuilder.append(fcall.getName()).append('('); - for (int i = 0; i < fcall.getParameters().length; i++) { + Function functionCall = (Function) value; + stringBuilder.append(functionCall.getName()).append('('); + for (int i = 0; i < functionCall.getParameters().length; i++) { if (i > 0) { stringBuilder.append(','); } - appendValue(fcall.getParameters()[i], stringBuilder); + appendValue(functionCall.getParameters()[i], stringBuilder); } stringBuilder.append(')'); } else if (value instanceof Column) { diff --git a/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java b/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java index 6cb063887..99a13fc7e 100644 --- a/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java +++ b/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java @@ -24,8 +24,4 @@ public String getDatabase() { return query.getDatabase(); } - @Override - public String toString() { - return query.toString(); - } } diff --git a/src/main/java/org/influxdb/querybuilder/Function.java b/src/main/java/org/influxdb/querybuilder/Function.java index 1e29c041b..c21f272b9 100644 --- a/src/main/java/org/influxdb/querybuilder/Function.java +++ b/src/main/java/org/influxdb/querybuilder/Function.java @@ -18,17 +18,4 @@ public Object[] getParameters() { return parameters; } - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(name).append('('); - for (int i = 0; i < parameters.length; i++) { - if (i > 0) { - sb.append(','); - } - sb.append(parameters[i]); - } - sb.append(')'); - return sb.toString(); - } } diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 741301d52..455d7802b 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -94,6 +94,12 @@ public 
void testRawExpressionInWhere() { assertEquals(query.getDatabase(), select.getDatabase()); } + @Test + public void testRawExpressionEmptyValue() { + Query select = select().all().from(DATABASE, "foobar").where(ne("k", raw(null))); + assertThrows(IllegalArgumentException.class, () -> select.getCommand(), "Missing text for expression"); + } + @Test public void testOrderingAsc() { Query query = From 4dae48634854a4ab591c3dbdea15468fbd7d4b68 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 23:03:26 +0100 Subject: [PATCH 272/745] Added test on invalid limit --- src/main/java/org/influxdb/querybuilder/Select.java | 4 ---- .../org/influxdb/querybuilder/BuiltQueryTest.java | 11 +++++++++++ 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/influxdb/querybuilder/Select.java b/src/main/java/org/influxdb/querybuilder/Select.java index 629a7ebdc..7e66b49ab 100644 --- a/src/main/java/org/influxdb/querybuilder/Select.java +++ b/src/main/java/org/influxdb/querybuilder/Select.java @@ -86,10 +86,6 @@ public Where where(final String text) { return where.and(new RawTextClause(text)); } - public Where where() { - return where; - } - public Select orderBy(final Ordering ordering) { this.ordering = ordering; diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 455d7802b..cf70b4bf4 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -254,6 +254,17 @@ public void testLimit() { assertEquals(query.getDatabase(), select.getDatabase()); } + @Test + public void testInvalidLimit() { + Query select = + select().column("test1").from(DATABASE, "foobar").groupBy("test2", "test3").limit(-1); + + assertThrows( + IllegalArgumentException.class, + () -> select.getCommand(), + "Invalid LIMIT value, must be strictly positive"); + } + @Test public void testLimitOffset() { Query query = 
From 4da6bef3ba327d16f8548b886c23c07c0389c2bd Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 23:14:38 +0100 Subject: [PATCH 273/745] Added invalid limit test --- .../influxdb/querybuilder/BuiltQueryTest.java | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index cf70b4bf4..0f6224d09 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -67,6 +67,16 @@ public void testDistinctWithExpression() { assertEquals(query.getDatabase(), select.getDatabase()); } + @Test + public void testDistinctWithMultipleSelectedColumns() { + Query select = select().column("test1").column("test2").distinct().from(DATABASE, "foobar").limit(1, 20); + + assertThrows( + IllegalStateException.class, + () -> select.getCommand(), + "DISTINCT function can only be used with one column"); + } + @Test public void testMultipleColumns() { Query query = select().column("test1").distinct().column("test2").from(DATABASE, "foobar"); @@ -96,8 +106,8 @@ public void testRawExpressionInWhere() { @Test public void testRawExpressionEmptyValue() { - Query select = select().all().from(DATABASE, "foobar").where(ne("k", raw(null))); - assertThrows(IllegalArgumentException.class, () -> select.getCommand(), "Missing text for expression"); + String rawTextClause = null; + assertThrows(IllegalArgumentException.class, () -> select().all().from(DATABASE, "foobar").where(rawTextClause), "Missing text for expression"); } @Test @@ -256,12 +266,9 @@ public void testLimit() { @Test public void testInvalidLimit() { - Query select = - select().column("test1").from(DATABASE, "foobar").groupBy("test2", "test3").limit(-1); - assertThrows( IllegalArgumentException.class, - () -> select.getCommand(), + () -> select().column("test1").from(DATABASE, 
"foobar").groupBy("test2", "test3").limit(-1), "Invalid LIMIT value, must be strictly positive"); } From 216304f8758998f14feef34ba95d838d2f469662 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 23:18:33 +0100 Subject: [PATCH 274/745] Added test on distinct without columns selected --- .../java/org/influxdb/querybuilder/BuiltQueryTest.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 0f6224d09..ab452d6ad 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -77,6 +77,14 @@ public void testDistinctWithMultipleSelectedColumns() { "DISTINCT function can only be used with one column"); } + @Test + public void testDistinctWithoutSelectedColumns() { + assertThrows( + IllegalStateException.class, + () -> select().distinct().from(DATABASE, "foobar").limit(1, 20), + "DISTINCT function can only be used with one column"); + } + @Test public void testMultipleColumns() { Query query = select().column("test1").distinct().column("test2").from(DATABASE, "foobar"); From c9bd99bde34390e84cf535be567905c1a3c1114a Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 23:32:47 +0100 Subject: [PATCH 275/745] Added test on invalid negative regex --- src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index ab452d6ad..4aec2abe7 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -5,6 +5,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import com.sun.javaws.exceptions.InvalidArgumentException; 
import org.influxdb.dto.Query; import org.junit.jupiter.api.Test; @@ -39,6 +40,11 @@ public void testNegativeRegex() { assertEquals(query.getDatabase(), select.getDatabase()); } + @Test + public void testInvalidNegativeRegex() { + assertThrows(IllegalArgumentException.class, () -> select().max("k").from(DATABASE, "foobar").where(nregex("k", null)),"Missing text for expression"); + } + @Test public void testContains() { Query query = new Query("SELECT MAX(k) FROM foobar WHERE k =~ /text/;", DATABASE); From bf503a8e18b32f01f56ee65342302860ba580abb Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 23:38:27 +0100 Subject: [PATCH 276/745] Removed invalid import --- src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 4aec2abe7..9b651fa6a 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -5,7 +5,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; -import com.sun.javaws.exceptions.InvalidArgumentException; import org.influxdb.dto.Query; import org.junit.jupiter.api.Test; From 5b5299fd128d671e79efac8c3b5f5860dc4f58a1 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 23:45:20 +0100 Subject: [PATCH 277/745] Added encoded test --- .../java/org/influxdb/querybuilder/BuiltQueryTest.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 9b651fa6a..9789f9374 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -12,6 +12,14 @@ public class BuiltQueryTest { private static final String 
DATABASE = "testdb"; + @Test + public void testCommandWithUrlEncoded() { + Query select = select().max("k").as("hello").from(DATABASE, "foobar"); + String encoded = select.getCommandWithUrlEncoded(); + + assertEquals("SELECT+MAX%28k%29+AS+hello+FROM+foobar%3B", encoded); + } + @Test public void testAlias() { Query query = new Query("SELECT MAX(k) AS hello FROM foobar;", DATABASE); From 21b891d90754bb731b26eec1a41fdc13ba8d65b8 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 23:51:06 +0100 Subject: [PATCH 278/745] Added lt and gt test --- .../org/influxdb/querybuilder/BuiltQueryTest.java | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 9789f9374..f65960314 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -182,6 +182,20 @@ public void testSelect() { assertEquals(query.getDatabase(), select.getDatabase()); } + @Test + public void testSelectLtGte() { + Query query = new Query("SELECT * FROM foobar WHERE k<4 AND c>='a';", DATABASE); + Query select = + select() + .all() + .from(DATABASE, "foobar") + .where(lt("k", 4)) + .and(gte("c", "a")); + + assertEquals(query.getCommand(), select.getCommand()); + assertEquals(query.getDatabase(), select.getDatabase()); + } + @Test public void testMean() { Query query = new Query("SELECT MEAN(k) FROM foobar WHERE k=4 AND c>'a' AND c<='z';", DATABASE); From 3f3045bc99bebb1639bf1a1469ae4f94f19c21b8 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Fri, 31 Aug 2018 23:58:25 +0100 Subject: [PATCH 279/745] Added test for invalid regex --- src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 
f65960314..5e268ce14 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -38,6 +38,11 @@ public void testRegex() { assertEquals(query.getDatabase(), select.getDatabase()); } + @Test + public void testInvalidRegex() { + assertThrows(IllegalArgumentException.class, () -> select().max("k").from(DATABASE, "foobar").where(regex("k", null)),"Missing text for expression"); + } + @Test public void testNegativeRegex() { Query query = new Query("SELECT MAX(k) FROM foobar WHERE k !~ /[0-9]/;", DATABASE); From 1e020a574dcf0883b9c6176ce4ebecbd88fe60a4 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 00:01:07 +0100 Subject: [PATCH 280/745] removed unused functions --- .../java/org/influxdb/querybuilder/BuiltQuery.java | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/src/main/java/org/influxdb/querybuilder/BuiltQuery.java b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java index 2d68d1312..2cb8d1992 100644 --- a/src/main/java/org/influxdb/querybuilder/BuiltQuery.java +++ b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java @@ -145,18 +145,6 @@ public static Object min(final Object column) { return FunctionFactory.min(column); } - public static Object sum(final Object column) { - return FunctionFactory.sum(column); - } - - public static Object mean(final Object column) { - return FunctionFactory.mean(column); - } - - public static Object column(final String name) { - return FunctionFactory.column(name); - } - public static Object now() { return FunctionFactory.now(); } From 6bd420cc3c18ecad66cfa1b2fb344486eda8ed67 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 00:05:02 +0100 Subject: [PATCH 281/745] removed count from BuiltQuery --- src/main/java/org/influxdb/querybuilder/BuiltQuery.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/main/java/org/influxdb/querybuilder/BuiltQuery.java 
b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java index 2cb8d1992..51c88751c 100644 --- a/src/main/java/org/influxdb/querybuilder/BuiltQuery.java +++ b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java @@ -133,10 +133,6 @@ public static Object raw(final String str) { return new RawText(str); } - public static Object count(final Object column) { - return FunctionFactory.count(column); - } - public static Object max(final Object column) { return FunctionFactory.max(column); } From b5248f6a04be4a0430555c5416dca1877a4698b3 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 00:22:12 +0100 Subject: [PATCH 282/745] Added test on all --- .../influxdb/querybuilder/BuiltQueryTest.java | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 5e268ce14..8ab7f541d 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -114,13 +114,28 @@ public void testMultipleColumns() { @Test public void testNonEqual() { - Query query = new Query("SELECT * FROM foobar WHERE k!=4;", DATABASE); - Query select = select().all().from(DATABASE, "foobar").where(ne("k", 4)); + Query query = new Query("SELECT * FROM foobar WHERE test1!=4;", DATABASE); + Query select = select().all().from(DATABASE, "foobar").where(ne("test1", 4)); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); } + @Test + public void testSelectAllWithColumn() { + assertThrows(IllegalStateException.class, () -> select().column("test1").all().from(DATABASE, "foobar").where(ne("k", raw("raw expression"))),"Can't select all columns over columns selected previously"); + } + + @Test + public void testSelectAllWithColumns() { + assertThrows(IllegalStateException.class, () -> 
select().column("test1").column("test2").all().from(DATABASE, "foobar").where(ne("k", raw("raw expression"))),"Can't select all columns over columns selected previously"); + } + + @Test + public void testSelectAllWithDistinct() { + assertThrows(IllegalStateException.class, () -> select().column("test1").distinct().all().from(DATABASE, "foobar").where(ne("k", raw("raw expression"))),"Can't select all columns over columns selected previously"); + } + @Test public void testRawExpressionInWhere() { Query query = new Query("SELECT * FROM foobar WHERE k!=raw expression;", DATABASE); From 462f95189f82e9e4d28f14866a3975ce652e9de5 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 00:26:06 +0100 Subject: [PATCH 283/745] enforcing database selection --- src/main/java/org/influxdb/querybuilder/Select.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/main/java/org/influxdb/querybuilder/Select.java b/src/main/java/org/influxdb/querybuilder/Select.java index 7e66b49ab..bbe92454b 100644 --- a/src/main/java/org/influxdb/querybuilder/Select.java +++ b/src/main/java/org/influxdb/querybuilder/Select.java @@ -169,10 +169,6 @@ public Builder requiresPost() { return this; } - public Select from(final String table) { - return from(null, table); - } - public Select from(final String database, final String table) { return new Select(database, table, columns, isDistinct, requiresPost); } From 9533297544b66de541ce4c961045c018c4e18671 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 00:27:58 +0100 Subject: [PATCH 284/745] Forcing database selection --- .../java/org/influxdb/querybuilder/Selection.java | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/src/main/java/org/influxdb/querybuilder/Selection.java b/src/main/java/org/influxdb/querybuilder/Selection.java index 324c39686..913de5e02 100644 --- a/src/main/java/org/influxdb/querybuilder/Selection.java +++ b/src/main/java/org/influxdb/querybuilder/Selection.java @@ -112,20 
+112,12 @@ public Selection mean(final Object column) { } @Override - public Select from(final String keyspace, final String table) { + public Select from(final String database, final String table) { if (currentSelection != null) { moveToColumns(currentSelection); } currentSelection = null; - return super.from(keyspace, table); + return super.from(database, table); } - @Override - public Select from(final String table) { - if (currentSelection != null) { - moveToColumns(currentSelection); - } - currentSelection = null; - return super.from(table); - } } From bcda6268d658259cf9be4955965d66d199e762a0 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 00:35:42 +0100 Subject: [PATCH 285/745] Added tests on count all --- .../java/org/influxdb/querybuilder/BuiltQueryTest.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index 8ab7f541d..bd15c8037 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -450,6 +450,16 @@ public void testCountAll() { assertEquals(query.getDatabase(), select.getDatabase()); } + @Test + public void testCountAllWithColumn() { + assertThrows(IllegalStateException.class, () -> select().column("test1").countAll().from(DATABASE, "foobar"),"Can't count all with previously selected columns"); + } + + @Test + public void testCountAllWithColumns() { + assertThrows(IllegalStateException.class, () -> select().column("test1").column("test2").countAll().from(DATABASE, "foobar"),"Can't count all with previously selected columns"); + } + @Test public void testRequiresPost() { Query select = select().requiresPost().countAll().from(DATABASE, "foobar"); From d52dbb4ec6ff44237f799f04ef74ef5e561b193f Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 09:00:51 +0100 Subject: [PATCH 286/745] Applied formatting --- 
.../querybuilder/BuiltQueryDecorator.java | 1 - .../org/influxdb/querybuilder/Column.java | 1 - .../org/influxdb/querybuilder/Distinct.java | 1 - .../org/influxdb/querybuilder/Function.java | 1 - .../org/influxdb/querybuilder/Selection.java | 1 - .../influxdb/querybuilder/BuiltQueryTest.java | 198 +++++++++++------- 6 files changed, 124 insertions(+), 79 deletions(-) diff --git a/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java b/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java index 99a13fc7e..c5cbe1b92 100644 --- a/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java +++ b/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java @@ -23,5 +23,4 @@ StringBuilder buildQueryString() { public String getDatabase() { return query.getDatabase(); } - } diff --git a/src/main/java/org/influxdb/querybuilder/Column.java b/src/main/java/org/influxdb/querybuilder/Column.java index f9185ce7e..ac70cbf33 100644 --- a/src/main/java/org/influxdb/querybuilder/Column.java +++ b/src/main/java/org/influxdb/querybuilder/Column.java @@ -11,5 +11,4 @@ public class Column { public String getName() { return name; } - } diff --git a/src/main/java/org/influxdb/querybuilder/Distinct.java b/src/main/java/org/influxdb/querybuilder/Distinct.java index 81d1b6508..4fafc928d 100644 --- a/src/main/java/org/influxdb/querybuilder/Distinct.java +++ b/src/main/java/org/influxdb/querybuilder/Distinct.java @@ -12,5 +12,4 @@ public class Distinct { public Object getExpression() { return expression; } - } diff --git a/src/main/java/org/influxdb/querybuilder/Function.java b/src/main/java/org/influxdb/querybuilder/Function.java index c21f272b9..d3e598c3c 100644 --- a/src/main/java/org/influxdb/querybuilder/Function.java +++ b/src/main/java/org/influxdb/querybuilder/Function.java @@ -17,5 +17,4 @@ public String getName() { public Object[] getParameters() { return parameters; } - } diff --git a/src/main/java/org/influxdb/querybuilder/Selection.java 
b/src/main/java/org/influxdb/querybuilder/Selection.java index 913de5e02..d74542001 100644 --- a/src/main/java/org/influxdb/querybuilder/Selection.java +++ b/src/main/java/org/influxdb/querybuilder/Selection.java @@ -119,5 +119,4 @@ public Select from(final String database, final String table) { currentSelection = null; return super.from(database, table); } - } diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index bd15c8037..3641b866b 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -14,16 +14,16 @@ public class BuiltQueryTest { @Test public void testCommandWithUrlEncoded() { - Query select = select().max("k").as("hello").from(DATABASE, "foobar"); + Query select = select().max("test1").as("hello").from(DATABASE, "foobar"); String encoded = select.getCommandWithUrlEncoded(); - assertEquals("SELECT+MAX%28k%29+AS+hello+FROM+foobar%3B", encoded); + assertEquals("SELECT+MAX%28test1%29+AS+hello+FROM+foobar%3B", encoded); } @Test public void testAlias() { - Query query = new Query("SELECT MAX(k) AS hello FROM foobar;", DATABASE); - Query select = select().max("k").as("hello").from(DATABASE, "foobar"); + Query query = new Query("SELECT MAX(test1) AS hello FROM foobar;", DATABASE); + Query select = select().max("test1").as("hello").from(DATABASE, "foobar"); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -31,8 +31,8 @@ public void testAlias() { @Test public void testRegex() { - Query query = new Query("SELECT MAX(k) FROM foobar WHERE k =~ /[0-9]/;", DATABASE); - Query select = select().max("k").from(DATABASE, "foobar").where(regex("k", "/[0-9]/")); + Query query = new Query("SELECT MAX(test1) FROM foobar WHERE test1 =~ /[0-9]/;", DATABASE); + Query select = select().max("test1").from(DATABASE, "foobar").where(regex("test1", 
"/[0-9]/")); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -40,13 +40,16 @@ public void testRegex() { @Test public void testInvalidRegex() { - assertThrows(IllegalArgumentException.class, () -> select().max("k").from(DATABASE, "foobar").where(regex("k", null)),"Missing text for expression"); + assertThrows( + IllegalArgumentException.class, + () -> select().max("test1").from(DATABASE, "foobar").where(regex("test1", null)), + "Missing text for expression"); } @Test public void testNegativeRegex() { - Query query = new Query("SELECT MAX(k) FROM foobar WHERE k !~ /[0-9]/;", DATABASE); - Query select = select().max("k").from(DATABASE, "foobar").where(nregex("k", "/[0-9]/")); + Query query = new Query("SELECT MAX(test1) FROM foobar WHERE test1 !~ /[0-9]/;", DATABASE); + Query select = select().max("test1").from(DATABASE, "foobar").where(nregex("test1", "/[0-9]/")); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -54,13 +57,16 @@ public void testNegativeRegex() { @Test public void testInvalidNegativeRegex() { - assertThrows(IllegalArgumentException.class, () -> select().max("k").from(DATABASE, "foobar").where(nregex("k", null)),"Missing text for expression"); + assertThrows( + IllegalArgumentException.class, + () -> select().max("test1").from(DATABASE, "foobar").where(nregex("test1", null)), + "Missing text for expression"); } @Test public void testContains() { - Query query = new Query("SELECT MAX(k) FROM foobar WHERE k =~ /text/;", DATABASE); - Query select = select().max("k").from(DATABASE, "foobar").where(contains("k", "text")); + Query query = new Query("SELECT MAX(test1) FROM foobar WHERE test1 =~ /text/;", DATABASE); + Query select = select().max("test1").from(DATABASE, "foobar").where(contains("test1", "text")); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ 
-68,8 +74,8 @@ public void testContains() { @Test public void testDistinct() { - Query query = new Query("SELECT DISTINCT k FROM foobar;", DATABASE); - Query select = select().column("k").distinct().from(DATABASE, "foobar"); + Query query = new Query("SELECT DISTINCT test1 FROM foobar;", DATABASE); + Query select = select().column("test1").distinct().from(DATABASE, "foobar"); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -87,20 +93,21 @@ public void testDistinctWithExpression() { @Test public void testDistinctWithMultipleSelectedColumns() { - Query select = select().column("test1").column("test2").distinct().from(DATABASE, "foobar").limit(1, 20); + Query select = + select().column("test1").column("test2").distinct().from(DATABASE, "foobar").limit(1, 20); assertThrows( - IllegalStateException.class, - () -> select.getCommand(), - "DISTINCT function can only be used with one column"); + IllegalStateException.class, + () -> select.getCommand(), + "DISTINCT function can only be used with one column"); } @Test public void testDistinctWithoutSelectedColumns() { assertThrows( - IllegalStateException.class, - () -> select().distinct().from(DATABASE, "foobar").limit(1, 20), - "DISTINCT function can only be used with one column"); + IllegalStateException.class, + () -> select().distinct().from(DATABASE, "foobar").limit(1, 20), + "DISTINCT function can only be used with one column"); } @Test @@ -123,23 +130,50 @@ public void testNonEqual() { @Test public void testSelectAllWithColumn() { - assertThrows(IllegalStateException.class, () -> select().column("test1").all().from(DATABASE, "foobar").where(ne("k", raw("raw expression"))),"Can't select all columns over columns selected previously"); + assertThrows( + IllegalStateException.class, + () -> + select() + .column("test1") + .all() + .from(DATABASE, "foobar") + .where(ne("test1", raw("raw expression"))), + "Can't select all columns over columns selected 
previously"); } @Test public void testSelectAllWithColumns() { - assertThrows(IllegalStateException.class, () -> select().column("test1").column("test2").all().from(DATABASE, "foobar").where(ne("k", raw("raw expression"))),"Can't select all columns over columns selected previously"); + assertThrows( + IllegalStateException.class, + () -> + select() + .column("test1") + .column("test2") + .all() + .from(DATABASE, "foobar") + .where(ne("test1", raw("raw expression"))), + "Can't select all columns over columns selected previously"); } @Test public void testSelectAllWithDistinct() { - assertThrows(IllegalStateException.class, () -> select().column("test1").distinct().all().from(DATABASE, "foobar").where(ne("k", raw("raw expression"))),"Can't select all columns over columns selected previously"); + assertThrows( + IllegalStateException.class, + () -> + select() + .column("test1") + .distinct() + .all() + .from(DATABASE, "foobar") + .where(ne("test1", raw("raw expression"))), + "Can't select all columns over columns selected previously"); } @Test public void testRawExpressionInWhere() { - Query query = new Query("SELECT * FROM foobar WHERE k!=raw expression;", DATABASE); - Query select = select().all().from(DATABASE, "foobar").where(ne("k", raw("raw expression"))); + Query query = new Query("SELECT * FROM foobar WHERE test1!=raw expression;", DATABASE); + Query select = + select().all().from(DATABASE, "foobar").where(ne("test1", raw("raw expression"))); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -148,21 +182,25 @@ public void testRawExpressionInWhere() { @Test public void testRawExpressionEmptyValue() { String rawTextClause = null; - assertThrows(IllegalArgumentException.class, () -> select().all().from(DATABASE, "foobar").where(rawTextClause), "Missing text for expression"); + assertThrows( + IllegalArgumentException.class, + () -> select().all().from(DATABASE, "foobar").where(rawTextClause), + 
"Missing text for expression"); } @Test public void testOrderingAsc() { Query query = new Query( - "SELECT * FROM foobar WHERE k=4 AND c>'a' AND c<='z' ORDER BY time ASC;", DATABASE); + "SELECT * FROM foobar WHERE test1=4 AND test2>'a' AND test2<='z' ORDER BY time ASC;", + DATABASE); Query select = select() .all() .from(DATABASE, "foobar") - .where(eq("k", 4)) - .and(gt("c", "a")) - .and(lte("c", "z")) + .where(eq("test1", 4)) + .and(gt("test2", "a")) + .and(lte("test2", "z")) .orderBy(asc()); assertEquals(query.getCommand(), select.getCommand()); @@ -173,14 +211,15 @@ public void testOrderingAsc() { public void testOrderingDesc() { Query query = new Query( - "SELECT * FROM foobar WHERE k=4 AND c>'a' AND c<='z' ORDER BY time DESC;", DATABASE); + "SELECT * FROM foobar WHERE test1=4 AND test2>'a' AND test2<='z' ORDER BY time DESC;", + DATABASE); Query select = select() .all() .from(DATABASE, "foobar") - .where(eq("k", 4)) - .and(gt("c", "a")) - .and(lte("c", "z")) + .where(eq("test1", 4)) + .and(gt("test2", "a")) + .and(lte("test2", "z")) .orderBy(desc()); assertEquals(query.getCommand(), select.getCommand()); @@ -189,14 +228,15 @@ public void testOrderingDesc() { @Test public void testSelect() { - Query query = new Query("SELECT * FROM foobar WHERE k=4 AND c>'a' AND c<='z';", DATABASE); + Query query = + new Query("SELECT * FROM foobar WHERE test1=4 AND test2>'a' AND test2<='z';", DATABASE); Query select = select() .all() .from(DATABASE, "foobar") - .where(eq("k", 4)) - .and(gt("c", "a")) - .and(lte("c", "z")); + .where(eq("test1", 4)) + .and(gt("test2", "a")) + .and(lte("test2", "z")); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -204,13 +244,9 @@ public void testSelect() { @Test public void testSelectLtGte() { - Query query = new Query("SELECT * FROM foobar WHERE k<4 AND c>='a';", DATABASE); + Query query = new Query("SELECT * FROM foobar WHERE test1<4 AND test2>='a';", DATABASE); Query select = 
- select() - .all() - .from(DATABASE, "foobar") - .where(lt("k", 4)) - .and(gte("c", "a")); + select().all().from(DATABASE, "foobar").where(lt("test1", 4)).and(gte("test2", "a")); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -218,14 +254,16 @@ public void testSelectLtGte() { @Test public void testMean() { - Query query = new Query("SELECT MEAN(k) FROM foobar WHERE k=4 AND c>'a' AND c<='z';", DATABASE); + Query query = + new Query( + "SELECT MEAN(test1) FROM foobar WHERE test1=4 AND test2>'a' AND test2<='z';", DATABASE); Query select = select() - .mean("k") + .mean("test1") .from(DATABASE, "foobar") - .where(eq("k", 4)) - .and(gt("c", "a")) - .and(lte("c", "z")); + .where(eq("test1", 4)) + .and(gt("test2", "a")) + .and(lte("test2", "z")); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -233,14 +271,16 @@ public void testMean() { @Test public void testSum() { - Query query = new Query("SELECT SUM(k) FROM foobar WHERE k=4 AND c>'a' AND c<='z';", DATABASE); + Query query = + new Query( + "SELECT SUM(test1) FROM foobar WHERE test1=4 AND test2>'a' AND test2<='z';", DATABASE); Query select = select() - .sum("k") + .sum("test1") .from(DATABASE, "foobar") - .where(eq("k", 4)) - .and(gt("c", "a")) - .and(lte("c", "z")); + .where(eq("test1", 4)) + .and(gt("test2", "a")) + .and(lte("test2", "z")); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -248,14 +288,16 @@ public void testSum() { @Test public void testMin() { - Query query = new Query("SELECT MIN(k) FROM foobar WHERE k=4 AND c>'a' AND c<='z';", DATABASE); + Query query = + new Query( + "SELECT MIN(test1) FROM foobar WHERE test1=4 AND test2>'a' AND test2<='z';", DATABASE); Query select = select() - .min("k") + .min("test1") .from(DATABASE, "foobar") - .where(eq("k", 4)) - .and(gt("c", "a")) - .and(lte("c", "z")); + 
.where(eq("test1", 4)) + .and(gt("test2", "a")) + .and(lte("test2", "z")); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -263,14 +305,16 @@ public void testMin() { @Test public void testMax() { - Query query = new Query("SELECT MAX(k) FROM foobar WHERE k=4 AND c>'a' AND c<='z';", DATABASE); + Query query = + new Query( + "SELECT MAX(test1) FROM foobar WHERE test1=4 AND test2>'a' AND test2<='z';", DATABASE); Query select = select() - .max("k") + .max("test1") .from(DATABASE, "foobar") - .where(eq("k", 4)) - .and(gt("c", "a")) - .and(lte("c", "z")); + .where(eq("test1", 4)) + .and(gt("test2", "a")) + .and(lte("test2", "z")); assertEquals(query.getCommand(), select.getCommand()); assertEquals(query.getDatabase(), select.getDatabase()); @@ -319,13 +363,13 @@ public void testLimit() { assertEquals(query.getDatabase(), select.getDatabase()); } - @Test - public void testInvalidLimit() { - assertThrows( - IllegalArgumentException.class, - () -> select().column("test1").from(DATABASE, "foobar").groupBy("test2", "test3").limit(-1), - "Invalid LIMIT value, must be strictly positive"); - } + @Test + public void testInvalidLimit() { + assertThrows( + IllegalArgumentException.class, + () -> select().column("test1").from(DATABASE, "foobar").groupBy("test2", "test3").limit(-1), + "Invalid LIMIT value, must be strictly positive"); + } @Test public void testLimitOffset() { @@ -452,12 +496,18 @@ public void testCountAll() { @Test public void testCountAllWithColumn() { - assertThrows(IllegalStateException.class, () -> select().column("test1").countAll().from(DATABASE, "foobar"),"Can't count all with previously selected columns"); + assertThrows( + IllegalStateException.class, + () -> select().column("test1").countAll().from(DATABASE, "foobar"), + "Can't count all with previously selected columns"); } @Test public void testCountAllWithColumns() { - assertThrows(IllegalStateException.class, () -> 
select().column("test1").column("test2").countAll().from(DATABASE, "foobar"),"Can't count all with previously selected columns"); + assertThrows( + IllegalStateException.class, + () -> select().column("test1").column("test2").countAll().from(DATABASE, "foobar"), + "Can't count all with previously selected columns"); } @Test From f5e61c1c5756a476c2fc3a983da1a6c961618c27 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 15:23:11 +0100 Subject: [PATCH 287/745] Added conjunction support --- .../org/influxdb/querybuilder/Appender.java | 12 +++++ .../org/influxdb/querybuilder/Select.java | 32 ++++++++---- .../querybuilder/clauses/AndConjunction.java | 15 ++++++ .../querybuilder/clauses/Conjunction.java | 6 +++ .../clauses/ConjunctionClause.java | 15 ++++++ .../querybuilder/clauses/OrConjunction.java | 15 ++++++ .../influxdb/querybuilder/BuiltQueryTest.java | 49 +++++++++++++++++++ 7 files changed, 135 insertions(+), 9 deletions(-) create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/AndConjunction.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/Conjunction.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/ConjunctionClause.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/OrConjunction.java diff --git a/src/main/java/org/influxdb/querybuilder/Appender.java b/src/main/java/org/influxdb/querybuilder/Appender.java index d2347057e..9aacc1a07 100644 --- a/src/main/java/org/influxdb/querybuilder/Appender.java +++ b/src/main/java/org/influxdb/querybuilder/Appender.java @@ -2,6 +2,7 @@ import java.util.List; import java.util.regex.Pattern; +import org.influxdb.querybuilder.clauses.ConjunctionClause; public final class Appender { @@ -10,6 +11,17 @@ public final class Appender { private Appender() { } + public static StringBuilder joinAndAppend( + final StringBuilder stringBuilder, final List clauses) { + for (int i = 0; i < clauses.size(); i++) { + if (i > 0) { + 
clauses.get(i).join(stringBuilder); + } + clauses.get(i).appendTo(stringBuilder); + } + return stringBuilder; + } + public static StringBuilder joinAndAppend( final StringBuilder stringBuilder, final String separator, diff --git a/src/main/java/org/influxdb/querybuilder/Select.java b/src/main/java/org/influxdb/querybuilder/Select.java index bbe92454b..29701d6ed 100644 --- a/src/main/java/org/influxdb/querybuilder/Select.java +++ b/src/main/java/org/influxdb/querybuilder/Select.java @@ -1,11 +1,20 @@ package org.influxdb.querybuilder; +import org.influxdb.querybuilder.clauses.Clause; +import org.influxdb.querybuilder.clauses.RawTextClause; +import org.influxdb.querybuilder.clauses.ConjunctionClause; +import org.influxdb.querybuilder.clauses.AndConjunction; +import org.influxdb.querybuilder.clauses.OrConjunction; + + +import static org.influxdb.querybuilder.Appender.appendName; +import static org.influxdb.querybuilder.Appender.joinAndAppend; +import static org.influxdb.querybuilder.Appender.joinAndAppendNames; + import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.influxdb.querybuilder.clauses.Clause; -import org.influxdb.querybuilder.clauses.RawTextClause; public class Select extends BuiltQuery { @@ -46,25 +55,25 @@ StringBuilder buildQueryString() { if (columns == null) { builder.append('*'); } else { - Appender.joinAndAppendNames(builder, columns); + joinAndAppendNames(builder, columns); } builder.append(" FROM "); - Appender.appendName(table, builder); + appendName(table, builder); if (!where.clauses.isEmpty()) { builder.append(" WHERE "); - Appender.joinAndAppend(builder, " AND ", where.clauses); + joinAndAppend(builder, where.clauses); } if (groupByColumns != null) { builder.append(" GROUP BY "); - Appender.joinAndAppendNames(builder, groupByColumns); + joinAndAppendNames(builder, groupByColumns); } if (ordering != null) { builder.append(" ORDER BY "); - Appender.joinAndAppend(builder, ",", 
Collections.singletonList(ordering)); + joinAndAppend(builder, ",", Collections.singletonList(ordering)); } if (limit != null) { @@ -123,14 +132,19 @@ public Select limit(final int limit, final long offSet) { public static class Where extends BuiltQueryDecorator { - private final List clauses = new ArrayList(); + private final List clauses = new ArrayList<>(); Where(final Select statement) { super(statement); @@ -148,6 +148,14 @@ public Where or(final Clause clause) { return this; } + public WhereNested andNested() { + return new WhereNested(this, false); + } + + public WhereNested orNested() { + return new WhereNested(this, true); + } + public Select orderBy(final Ordering orderings) { return query.orderBy(orderings); } @@ -165,6 +173,36 @@ public Select limit(final int limit, final long offSet) { } } + public static class WhereNested { + + private final List clauses = new ArrayList<>(); + private final boolean orConjunction; + private final Where where; + + public WhereNested(final Where where, final boolean orConjunction) { + this.where = where; + this.orConjunction = orConjunction; + } + + public WhereNested and(final Clause clause) { + clauses.add(new AndConjunction(clause)); + return this; + } + + public WhereNested or(final Clause clause) { + clauses.add(new OrConjunction(clause)); + return this; + } + + public Where close() { + if (orConjunction) { + return where.or(new NestedClause(clauses)); + } else { + return where.and(new NestedClause(clauses)); + } + } + } + public static class Builder { protected List columns; diff --git a/src/main/java/org/influxdb/querybuilder/clauses/ConjunctionClause.java b/src/main/java/org/influxdb/querybuilder/clauses/ConjunctionClause.java index d30d19d7b..b393fd81f 100644 --- a/src/main/java/org/influxdb/querybuilder/clauses/ConjunctionClause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/ConjunctionClause.java @@ -9,7 +9,7 @@ public ConjunctionClause(final Clause clause) { } @Override - public void 
appendTo(final StringBuilder sb) { - clause.appendTo(sb); + public void appendTo(final StringBuilder stringBuilder) { + clause.appendTo(stringBuilder); } } diff --git a/src/main/java/org/influxdb/querybuilder/clauses/NestedClause.java b/src/main/java/org/influxdb/querybuilder/clauses/NestedClause.java new file mode 100644 index 000000000..d4ca1e45e --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/clauses/NestedClause.java @@ -0,0 +1,21 @@ +package org.influxdb.querybuilder.clauses; + +import java.util.List; + +import static org.influxdb.querybuilder.Appender.joinAndAppend; + +public class NestedClause implements Clause { + + private final List conjunctionClauses; + + public NestedClause(final List conjunctionClauses) { + this.conjunctionClauses = conjunctionClauses; + } + + @Override + public void appendTo(final StringBuilder stringBuilder) { + stringBuilder.append("("); + joinAndAppend(stringBuilder, conjunctionClauses); + stringBuilder.append(")"); + } +} diff --git a/src/main/java/org/influxdb/querybuilder/clauses/SimpleClause.java b/src/main/java/org/influxdb/querybuilder/clauses/SimpleClause.java index d8e5056ba..f40396167 100644 --- a/src/main/java/org/influxdb/querybuilder/clauses/SimpleClause.java +++ b/src/main/java/org/influxdb/querybuilder/clauses/SimpleClause.java @@ -14,8 +14,8 @@ public SimpleClause(final String name, final String op, final Object value) { } @Override - public void appendTo(final StringBuilder sb) { - Appender.appendName(name, sb).append(op); - Appender.appendValue(value, sb); + public void appendTo(final StringBuilder stringBuilder) { + Appender.appendName(name, stringBuilder).append(op); + Appender.appendValue(value, stringBuilder); } } diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index e30951bd3..b11ed0e03 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ 
b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -387,6 +387,43 @@ public void testOrAndConjunction() { assertEquals(query.getDatabase(), select.getDatabase()); } + @Test + public void testNestedClauses() { + Query query = + new Query( + "SELECT test1 FROM foobar WHERE test1=1 OR test2='a' OR test3='b' " + + "AND (test2='b' OR test3='a') " + + "OR (test1=2 AND test2='y' AND test3='z') " + + "AND (test1=8 OR test2='g' OR test3='j') " + + "AND test4='c';", + DATABASE); + Query select = + select() + .column("test1") + .from(DATABASE, "foobar") + .where(eq("test1", 1)) + .or(eq("test2", "a")) + .or(eq("test3", "b")) + .andNested() + .and(eq("test2","b")) + .or(eq("test3","a")) + .close() + .orNested() + .and(eq("test1",2)) + .and(eq("test2","y")) + .and(eq("test3","z")) + .close() + .andNested() + .or(eq("test1",8)) + .or(eq("test2","g")) + .or(eq("test3","j")) + .close() + .and(eq("test4", "c")); + + assertEquals(query.getCommand(), select.getCommand()); + assertEquals(query.getDatabase(), select.getDatabase()); + } + @Test public void testWhereGroupBy() { Query query = From d5697825d2709682f705e94d236fcc1d159d7fa3 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 17:15:51 +0100 Subject: [PATCH 289/745] applied formatting --- src/main/java/org/influxdb/querybuilder/Select.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/org/influxdb/querybuilder/Select.java b/src/main/java/org/influxdb/querybuilder/Select.java index 839a34c5a..0785b0e12 100644 --- a/src/main/java/org/influxdb/querybuilder/Select.java +++ b/src/main/java/org/influxdb/querybuilder/Select.java @@ -87,6 +87,10 @@ StringBuilder buildQueryString() { return builder; } + public Where where() { + return where; + } + public Where where(final Clause clause) { return where.and(clause); } From 98542d6ed392653cc0456df90f85f2c878a2e8b5 Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Sat, 1 Sep 2018 17:21:20 +0100 Subject: [PATCH 290/745] Added test for where() 
--- .../influxdb/querybuilder/BuiltQueryTest.java | 82 +++++++++++-------- 1 file changed, 46 insertions(+), 36 deletions(-) diff --git a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java index b11ed0e03..27a1152f5 100644 --- a/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java +++ b/src/test/java/org/influxdb/querybuilder/BuiltQueryTest.java @@ -387,42 +387,52 @@ public void testOrAndConjunction() { assertEquals(query.getDatabase(), select.getDatabase()); } - @Test - public void testNestedClauses() { - Query query = - new Query( - "SELECT test1 FROM foobar WHERE test1=1 OR test2='a' OR test3='b' " + - "AND (test2='b' OR test3='a') " + - "OR (test1=2 AND test2='y' AND test3='z') " + - "AND (test1=8 OR test2='g' OR test3='j') " + - "AND test4='c';", - DATABASE); - Query select = - select() - .column("test1") - .from(DATABASE, "foobar") - .where(eq("test1", 1)) - .or(eq("test2", "a")) - .or(eq("test3", "b")) - .andNested() - .and(eq("test2","b")) - .or(eq("test3","a")) - .close() - .orNested() - .and(eq("test1",2)) - .and(eq("test2","y")) - .and(eq("test3","z")) - .close() - .andNested() - .or(eq("test1",8)) - .or(eq("test2","g")) - .or(eq("test3","j")) - .close() - .and(eq("test4", "c")); - - assertEquals(query.getCommand(), select.getCommand()); - assertEquals(query.getDatabase(), select.getDatabase()); - } + @Test + public void testNestedClauses() { + Query query = + new Query( + "SELECT test1 FROM foobar WHERE test1=1 OR test2='a' OR test3='b' " + + "AND (test2='b' OR test3='a') " + + "OR (test1=2 AND test2='y' AND test3='z') " + + "AND (test1=8 OR test2='g' OR test3='j') " + + "AND test4='c';", + DATABASE); + Query select = + select() + .column("test1") + .from(DATABASE, "foobar") + .where(eq("test1", 1)) + .or(eq("test2", "a")) + .or(eq("test3", "b")) + .andNested() + .and(eq("test2", "b")) + .or(eq("test3", "a")) + .close() + .orNested() + .and(eq("test1", 2)) + .and(eq("test2", 
"y")) + .and(eq("test3", "z")) + .close() + .andNested() + .or(eq("test1", 8)) + .or(eq("test2", "g")) + .or(eq("test3", "j")) + .close() + .and(eq("test4", "c")); + + assertEquals(query.getCommand(), select.getCommand()); + assertEquals(query.getDatabase(), select.getDatabase()); + } + + @Test + public void testWhere() { + Query query = new Query("SELECT test1 FROM foobar WHERE test4=1;", DATABASE); + Select.Where where = select().column("test1").from(DATABASE, "foobar").where(); + Query select = where.and(eq("test4", 1)); + + assertEquals(query.getCommand(), select.getCommand()); + assertEquals(query.getDatabase(), select.getDatabase()); + } @Test public void testWhereGroupBy() { From 94343301f32e3ebd756943dcdb5ff39c85486013 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Wed, 29 Aug 2018 07:10:38 +0700 Subject: [PATCH 291/745] imlement issue #480 : UDP target host address is cached --- .../java/org/influxdb/impl/InfluxDBImpl.java | 39 ++++++++++++------- src/test/java/org/influxdb/InfluxDBTest.java | 16 ++++++++ 2 files changed, 40 insertions(+), 15 deletions(-) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index a2f1019b6..a710da226 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -4,7 +4,6 @@ import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.Moshi; import okhttp3.Headers; -import okhttp3.HttpUrl; import okhttp3.MediaType; import okhttp3.OkHttpClient; import okhttp3.Request; @@ -42,7 +41,10 @@ import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; +import java.net.InetSocketAddress; import java.net.SocketException; +import java.net.URI; +import java.net.URISyntaxException; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -78,7 +80,7 @@ public class InfluxDBImpl implements InfluxDB { */ private static final 
LogLevel LOG_LEVEL = LogLevel.parseLogLevel(System.getProperty(LOG_LEVEL_PROPERTY)); - private final InetAddress hostAddress; + private final String hostName; private String version; private final Retrofit retrofit; private final InfluxDBService influxDBService; @@ -116,7 +118,7 @@ public class InfluxDBImpl implements InfluxDB { public InfluxDBImpl(final String url, final String username, final String password, final OkHttpClient.Builder client, final ResponseFormat responseFormat) { this.messagePack = ResponseFormat.MSGPACK.equals(responseFormat); - this.hostAddress = parseHostAddress(url); + this.hostName = parseHost(url); this.loggingInterceptor = new HttpLoggingInterceptor(); setLogLevel(LOG_LEVEL); @@ -162,7 +164,7 @@ public InfluxDBImpl(final String url, final String username, final String passwo final InfluxDBService influxDBService, final JsonAdapter adapter) { super(); this.messagePack = false; - this.hostAddress = parseHostAddress(url); + this.hostName = parseHost(url); this.loggingInterceptor = new HttpLoggingInterceptor(); setLogLevel(LOG_LEVEL); @@ -187,18 +189,25 @@ public InfluxDBImpl(final String url, final String username, final String passwo setRetentionPolicy(retentionPolicy); } - private InetAddress parseHostAddress(final String url) { - HttpUrl httpUrl = HttpUrl.parse(url); + private String parseHost(final String url) { + String hostName; + try { + URI uri = new URI(url); + hostName = uri.getHost(); + } catch (URISyntaxException e1) { + throw new IllegalArgumentException("Unable to parse url: " + url, e1); + } - if (httpUrl == null) { - throw new IllegalArgumentException("Unable to parse url: " + url); - } + if (hostName == null) { + throw new IllegalArgumentException("Unable to parse url: " + url); + } - try { - return InetAddress.getByName(httpUrl.host()); - } catch (UnknownHostException e) { - throw new InfluxDBIOException(e); - } + try { + InetAddress.getByName(hostName); + } catch (UnknownHostException e) { + throw new 
InfluxDBIOException(e); + } + return hostName; } @Override @@ -465,7 +474,7 @@ public void write(final int udpPort, final String records) { initialDatagramSocket(); byte[] bytes = records.getBytes(StandardCharsets.UTF_8); try { - datagramSocket.send(new DatagramPacket(bytes, bytes.length, hostAddress, udpPort)); + datagramSocket.send(new DatagramPacket(bytes, bytes.length, new InetSocketAddress(hostName, udpPort))); } catch (IOException e) { throw new InfluxDBIOException(e); } diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index d5af2bf2c..c852547cd 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -645,8 +645,24 @@ public void testWrongHostForInfluxdb(){ Assertions.assertThrows(RuntimeException.class, () -> { InfluxDBFactory.connect("http://" + errorHost + ":" + TestUtils.getInfluxPORT(true)); }); + + String unresolvableHost = "a.b.c"; + Assertions.assertThrows(InfluxDBIOException.class, () -> { + InfluxDBFactory.connect("http://" + unresolvableHost + ":" + TestUtils.getInfluxPORT(true)); + }); } + @Test + public void testInvalidUrlHandling(){ + Assertions.assertThrows(IllegalArgumentException.class, () -> { + InfluxDBFactory.connect("@@@http://@@@"); + }); + + Assertions.assertThrows(IllegalArgumentException.class, () -> { + InfluxDBFactory.connect("http://@@@abc"); + }); + } + @Test public void testBatchEnabledTwice() { this.influxDB.enableBatch(1, 1, TimeUnit.SECONDS); From 494c9247a4531b5e1bdb97674ae1586a9b828d1d Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Thu, 6 Sep 2018 12:52:59 +0700 Subject: [PATCH 292/745] fix issue #513 : Response body must be closed properly in case of JSON response --- src/main/java/org/influxdb/impl/InfluxDBImpl.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index a2f1019b6..173788fbb 100644 --- 
a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -842,6 +842,8 @@ public void process(final ResponseBody chunkedBody, final Consumer QueryResult queryResult = new QueryResult(); queryResult.setError("DONE"); consumer.accept(queryResult); + } finally { + chunkedBody.close(); } } } From ef729ee2f773de382c2c74fc019c72f4c2db0f11 Mon Sep 17 00:00:00 2001 From: Stefan Majer Date: Thu, 6 Sep 2018 16:43:24 +0200 Subject: [PATCH 293/745] Update junit from 5.2.0 -> 5.3.0, mockito from 2.19.0 -> 2.21.0 and assertj from 3.10.0 -> 3.11.1 --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index dd6cba7f9..0662dbb3e 100644 --- a/pom.xml +++ b/pom.xml @@ -218,13 +218,13 @@ org.junit.jupiter junit-jupiter-engine - 5.2.0 + 5.3.0 test org.junit.platform junit-platform-runner - 1.2.0 + 1.3.0 test @@ -236,13 +236,13 @@ org.assertj assertj-core - 3.10.0 + 3.11.1 test org.mockito mockito-core - 2.19.0 + 2.21.0 test From 3365b08d4deec90902a65f2ed029173c2d65bdce Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Fri, 7 Sep 2018 12:50:53 +0700 Subject: [PATCH 294/745] 2.13 changelog entries --- CHANGELOG.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 061b7bd77..a9c5bc3d1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,13 +3,17 @@ ## 2.13 [unreleased] ### Fixes - +- MessagePack queries: Exception during parsing InfluxDB version [macOS] [PR #487](https://github.com/influxdata/influxdb-java/issues/487) - The InfluxDBResultMapper is able to handle results with a different time precision [PR #501](https://github.com/influxdata/influxdb-java/pull/501) +- UDP target host address is cached [PR #502](https://github.com/influxdata/influxdb-java/issues/502) +- Error messages from server not parsed correctly when using msgpack [PR #506](https://github.com/influxdata/influxdb-java/issues/506) +- Response body must be closed 
properly in case of JSON response [PR #514](https://github.com/influxdata/influxdb-java/issues/514) ### Features - Support for Basic Authentication [PR #492](https://github.com/influxdata/influxdb-java/pull/492) - Added possibility to reuse client as a core part of [influxdb-java-reactive](https://github.com/bonitoo-io/influxdb-java-reactive) client [PR #493](https://github.com/influxdata/influxdb-java/pull/493) +- Retry capability for writing of BatchPoints [PR #503](https://github.com/influxdata/influxdb-java/issues/503) ## 2.12 [2018-07-31] From 9fe87198b4515236a1ff1ccbdc5b5c1a00713351 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Tue, 11 Sep 2018 09:03:00 +0200 Subject: [PATCH 295/745] added SSL client authentication howto --- FAQ.md | 36 +++++++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/FAQ.md b/FAQ.md index 7137936f1..b82a78a72 100644 --- a/FAQ.md +++ b/FAQ.md @@ -8,10 +8,10 @@ - [If there is an error during this background process, is it propagated to the rest of the client ?](#if-there-is-an-error-during-this-background-process-is-it-propagated-to-the-rest-of-the-client-) - [How the client responds to concurrent write backpressure from server ?](#how-the-client-responds-to-concurrent-write-backpressure-from-server-) - ## Security - [Is default config security setup TLS 1.2 ?](#is-default-config-security-setup-tls-12-) +- [How to use SSL client certificate authentication](#how-to-use-ssl-client-certificate-authentication-) ## Is the batch part of the client thread safe ? 
@@ -73,3 +73,37 @@ SSLv3/TLSv1/TLSv1.1/TLSv1.2 So if the server supports TLS1.2, the communication should be encrypted by TLS 1.2 (during the handshake the client will provide the list of accepted security protocols and the server will pick one, so this case the server would pick TLS 1.2) +## How to use SSL client certificate authentication + +To use SSL certificate authentication you need to setup `SslSocketFactory` on OkHttpClient.Builder. + +Here is the example, how to create InfluxDB client with the new SSLContext with custom identity keystore (p12) and truststore (jks): + +```java +KeyStore keyStore = KeyStore.getInstance("PKCS12"); +keyStore.load(new FileInputStream("conf/keystore.p12"), "changeme".toCharArray()); + +KeyStore trustStore = KeyStore.getInstance("JKS"); +trustStore.load(new FileInputStream("conf/trustStore.jks"), "changeme".toCharArray()); + +SSLContext sslContext = SSLContext.getInstance("SSL"); + +KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); +keyManagerFactory.init(keyStore, "changeme".toCharArray()); + +TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); +trustManagerFactory.init(trustStore); + +TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); + +sslContext.init(keyManagerFactory.getKeyManagers(), trustManagers, new SecureRandom()); +sslContext.getDefaultSSLParameters().setNeedClientAuth(true); + +OkHttpClient.Builder okhttpClientBuilder = new OkHttpClient.Builder(); +okhttpClientBuilder.sslSocketFactory(sslContext.getSocketFactory(), (X509TrustManager) trustManagers[0]); + +InfluxDB influxDB = InfluxDBFactory.connect("https://proxy_host:9086", okhttpClientBuilder); + +``` +InfluxDB (v1.6.2) does not have built-in support for client certificate ssl authentication. +SSL must be handled by http proxy such as Haproxy, nginx... 
From f111d84a562c730a74d86e48cc5503406fba2ab3 Mon Sep 17 00:00:00 2001 From: Hoan Xuan Le Date: Tue, 11 Sep 2018 22:07:58 +0700 Subject: [PATCH 296/745] fix issue #517 : missing millis and nanos in MsgPack --- .../msgpack/MessagePackTraverser.java | 15 ++++++--- .../org/influxdb/MessagePackInfluxDBTest.java | 31 +++++++++++++++---- .../msgpack/MessagePackTraverserTest.java | 5 +-- 3 files changed, 38 insertions(+), 13 deletions(-) diff --git a/src/main/java/org/influxdb/msgpack/MessagePackTraverser.java b/src/main/java/org/influxdb/msgpack/MessagePackTraverser.java index aecbd0c7a..5fab07cd5 100644 --- a/src/main/java/org/influxdb/msgpack/MessagePackTraverser.java +++ b/src/main/java/org/influxdb/msgpack/MessagePackTraverser.java @@ -8,6 +8,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; import org.influxdb.InfluxDBException; import org.influxdb.dto.QueryResult; @@ -27,6 +28,7 @@ */ public class MessagePackTraverser { + private static final byte MSG_PACK_TIME_EXT_TYPE = 5; private String lastStringNode; /** @@ -229,14 +231,17 @@ void traverse(final MessageUnpacker unpacker, final QueryResultModelPath queryRe } break; case EXTENSION: - final byte msgPackTimeExtType = (byte) 5; - final int timeOffset = 0; - final int timeByteArrayLength = 8; + final int nanosStartIndex = 8; extension = unpacker.unpackExtensionTypeHeader(); - if (extension.getType() == msgPackTimeExtType) { + if (extension.getType() == MSG_PACK_TIME_EXT_TYPE) { + //decode epoch nanos in accordance with https://github.com/tinylib/msgp/blob/master/msgp/write.go#L594 + dst = new byte[extension.getLength()]; unpacker.readPayload(dst); - o = ByteBuffer.wrap(dst, timeOffset, timeByteArrayLength).getLong(); + ByteBuffer bf = ByteBuffer.wrap(dst, 0, extension.getLength()); + long epochSeconds = bf.getLong(); + int nanosOffset = bf.getInt(nanosStartIndex); + o = TimeUnit.SECONDS.toNanos(epochSeconds) + nanosOffset; } break; diff --git 
a/src/test/java/org/influxdb/MessagePackInfluxDBTest.java b/src/test/java/org/influxdb/MessagePackInfluxDBTest.java index c8ab8b2c0..127983f87 100644 --- a/src/test/java/org/influxdb/MessagePackInfluxDBTest.java +++ b/src/test/java/org/influxdb/MessagePackInfluxDBTest.java @@ -5,6 +5,7 @@ import static org.mockito.Mockito.spy; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; @@ -91,9 +92,17 @@ public void testWriteBatchWithPrecision() throws Exception { // THEN the measure points have a timestamp with second precision QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); - Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0), t1); - Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0), t2); - Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0), t3); + long bySecond = TimeUnit.NANOSECONDS.toSeconds( + (Long) queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0)); + Assertions.assertEquals(bySecond, t1); + + bySecond = TimeUnit.NANOSECONDS.toSeconds( + (Long) queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0)); + Assertions.assertEquals(bySecond, t2); + + bySecond = TimeUnit.NANOSECONDS.toSeconds( + (Long) queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0)); + Assertions.assertEquals(bySecond, t3); this.influxDB.deleteDatabase(dbName); } @@ -182,9 +191,19 @@ public void testWriteRecordsWithPrecision() throws Exception { // THEN the measure points have a timestamp with second precision QueryResult queryResult = this.influxDB.query(new Query("SELECT * FROM " + measurement, dbName)); Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().size(), 3); - 
Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0), timeP1); - Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0), timeP2); - Assertions.assertEquals(queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0), timeP3); + + long bySecond = TimeUnit.NANOSECONDS.toSeconds( + (Long) queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(0)); + Assertions.assertEquals(bySecond, timeP1); + + bySecond = TimeUnit.NANOSECONDS.toSeconds( + (Long) queryResult.getResults().get(0).getSeries().get(0).getValues().get(1).get(0)); + Assertions.assertEquals(bySecond, timeP2); + + bySecond = TimeUnit.NANOSECONDS.toSeconds( + (Long) queryResult.getResults().get(0).getSeries().get(0).getValues().get(2).get(0)); + Assertions.assertEquals(bySecond, timeP3); + this.influxDB.deleteDatabase(dbName); } diff --git a/src/test/java/org/influxdb/msgpack/MessagePackTraverserTest.java b/src/test/java/org/influxdb/msgpack/MessagePackTraverserTest.java index 67d6b41db..19a00a001 100644 --- a/src/test/java/org/influxdb/msgpack/MessagePackTraverserTest.java +++ b/src/test/java/org/influxdb/msgpack/MessagePackTraverserTest.java @@ -33,7 +33,8 @@ public void testTraverseMethod() { QueryResult result = iter.next(); List> values = result.getResults().get(0).getSeries().get(0).getValues(); Assertions.assertEquals(2, values.size()); - assertEquals(1532325083L, values.get(0).get(0)); + + assertEquals(1532325083803052600L, values.get(0).get(0)); assertEquals("b", values.get(1).get(1)); assertTrue(iter.hasNext()); @@ -56,7 +57,7 @@ public void testParseMethodOnNonEmptyResult() { QueryResult queryResult = traverser.parse(MessagePackTraverserTest.class.getResourceAsStream("msgpack_2.bin")); List> values = queryResult.getResults().get(0).getSeries().get(0).getValues(); Assertions.assertEquals(3, values.size()); - assertEquals(1485273600L, values.get(0).get(0)); + 
assertEquals(1485273600000000000L, values.get(0).get(0)); assertEquals("two", values.get(1).get(1)); assertEquals(3.0, values.get(2).get(2)); } From 798b164b37314f167b9b4d7ce4697264c568c5ed Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Thu, 6 Sep 2018 07:37:29 +0200 Subject: [PATCH 297/745] Added onComplete notification for successfully end of stream --- FAQ.md | 13 +++++ src/main/java/org/influxdb/InfluxDB.java | 18 ++++++- .../java/org/influxdb/impl/InfluxDBImpl.java | 25 +++++++--- src/test/java/org/influxdb/InfluxDBTest.java | 49 +++++++++++++++++++ 4 files changed, 97 insertions(+), 8 deletions(-) diff --git a/FAQ.md b/FAQ.md index b82a78a72..c15207722 100644 --- a/FAQ.md +++ b/FAQ.md @@ -7,6 +7,7 @@ - [And if so, is there a single thread in the background that is emptying batch to the server ?](#and-if-so-is-there-a-single-thread-in-the-background-that-is-emptying-batch-to-the-server-) - [If there is an error during this background process, is it propagated to the rest of the client ?](#if-there-is-an-error-during-this-background-process-is-it-propagated-to-the-rest-of-the-client-) - [How the client responds to concurrent write backpressure from server ?](#how-the-client-responds-to-concurrent-write-backpressure-from-server-) +- [Is there a way to tell that all query chunks have arrived ?](#is-there-a-way-to-tell-that-all-query-chunks-have-arrived-) ## Security @@ -61,6 +62,18 @@ Form version 2.9, influxdb-java introduces new error handling feature, the clien So in case the number of write requests exceeds Concurrent write setting at server side, influxdb-java can try to make sure no writing points get lost (due to rejection from server) +## Is there a way to tell that all query chunks have arrived ? +Yes, there is __onComplete__ action that is invoked after successfully end of stream. 
+```java +influxDB.query(new Query("SELECT * FROM disk", "telegraf"), 10_000, + queryResult -> { + System.out.println("result = " + queryResult); + }, + () -> { + System.out.println("The query successfully finished."); + }); +``` + ## Is default config security setup TLS 1.2 ? (answer need to be verified) diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index a72856bd0..22c25dd40 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -451,10 +451,24 @@ public void write(final String database, final String retentionPolicy, * the query to execute. * @param chunkSize * the number of QueryResults to process in one chunk. - * @param consumer + * @param onNext * the consumer to invoke for each received QueryResult */ - public void query(Query query, int chunkSize, Consumer consumer); + public void query(Query query, int chunkSize, Consumer onNext); + + /** + * Execute a streaming query against a database. + * + * @param query + * the query to execute. + * @param chunkSize + * the number of QueryResults to process in one chunk. + * @param onNext + * the consumer to invoke for each received QueryResult + * @param onComplete + * the onComplete to invoke for successfully end of stream + */ + public void query(Query query, int chunkSize, Consumer onNext, Runnable onComplete); /** * Execute a query against a database. 
diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 5c03b7ea7..867ce1450 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -566,7 +566,16 @@ public void onFailure(final Call call, final Throwable throwable) { * {@inheritDoc} */ @Override - public void query(final Query query, final int chunkSize, final Consumer consumer) { + public void query(final Query query, final int chunkSize, final Consumer onNext) { + query(query, chunkSize, onNext, () -> { }); + } + + /** + * {@inheritDoc} + */ + @Override + public void query(final Query query, final int chunkSize, final Consumer onNext, + final Runnable onComplete) { Call call = null; if (query instanceof BoundParameterQuery) { BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query; @@ -582,7 +591,7 @@ public void onResponse(final Call call, final Response call, final Response consumer) throws IOException; + void process(ResponseBody chunkedBody, Consumer consumer, Runnable onComplete) throws IOException; } private class MessagePackChunkProccesor implements ChunkProccesor { @Override - public void process(final ResponseBody chunkedBody, final Consumer consumer) throws IOException { + public void process(final ResponseBody chunkedBody, final Consumer consumer, final Runnable onComplete) + throws IOException { MessagePackTraverser traverser = new MessagePackTraverser(); try (InputStream is = chunkedBody.byteStream()) { for (QueryResult result : traverser.traverse(is)) { consumer.accept(result); } } + onComplete.run(); } } @@ -875,7 +886,8 @@ public JSONChunkProccesor(final JsonAdapter adapter) { } @Override - public void process(final ResponseBody chunkedBody, final Consumer consumer) throws IOException { + public void process(final ResponseBody chunkedBody, final Consumer consumer, final Runnable onComplete) + throws IOException { try { BufferedSource source = 
chunkedBody.source(); while (true) { @@ -888,6 +900,7 @@ public void process(final ResponseBody chunkedBody, final Consumer QueryResult queryResult = new QueryResult(); queryResult.setError("DONE"); consumer.accept(queryResult); + onComplete.run(); } finally { chunkedBody.close(); } diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index c852547cd..a2d36c3e0 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -827,6 +827,55 @@ public void accept(QueryResult result) { } } + @Test + public void testChunkingOnComplete() throws InterruptedException { + if (this.influxDB.version().startsWith("0.") || this.influxDB.version().startsWith("1.0")) { + // do not test version 0.13 and 1.0 + return; + } + + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + BatchPoints batchPoints = BatchPoints.database(dbName).retentionPolicy(rp).build(); + Point point1 = Point.measurement("disk").tag("atag", "a").addField("used", 60L).addField("free", 1L).build(); + Point point2 = Point.measurement("disk").tag("atag", "b").addField("used", 70L).addField("free", 2L).build(); + Point point3 = Point.measurement("disk").tag("atag", "c").addField("used", 80L).addField("free", 3L).build(); + batchPoints.point(point1); + batchPoints.point(point2); + batchPoints.point(point3); + this.influxDB.write(batchPoints); + + CountDownLatch countDownLatch = new CountDownLatch(1); + + Thread.sleep(2000); + Query query = new Query("SELECT * FROM disk", dbName); + this.influxDB.query(query, 2, result -> {}, countDownLatch::countDown); + + Thread.sleep(2000); + this.influxDB.deleteDatabase(dbName); + + boolean await = countDownLatch.await(10, TimeUnit.SECONDS); + Assertions.assertTrue(await, "The onComplete action did not arrive!"); + } + + @Test + public void 
testChunkingFailOnComplete() throws InterruptedException { + if (this.influxDB.version().startsWith("0.") || this.influxDB.version().startsWith("1.0")) { + // do not test version 0.13 and 1.0 + return; + } + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + final CountDownLatch countDownLatch = new CountDownLatch(1); + Query query = new Query("UNKNOWN_QUERY", dbName); + this.influxDB.query(query, 10, result -> {}, countDownLatch::countDown); + this.influxDB.deleteDatabase(dbName); + + boolean await = countDownLatch.await(5, TimeUnit.SECONDS); + Assertions.assertFalse(await, "The onComplete action arrive!"); + } + @Test public void testFlushPendingWritesWhenBatchingEnabled() { String dbName = "flush_tests_" + System.currentTimeMillis(); From 6c34d4553c7dc093107bd710d4ffaf1c268baae7 Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Thu, 6 Sep 2018 10:47:32 +0200 Subject: [PATCH 298/745] Added onNext consumer that has capability to discontinue a streaming query --- FAQ.md | 29 +++++-- src/main/java/org/influxdb/InfluxDB.java | 49 +++++++++++ .../java/org/influxdb/impl/InfluxDBImpl.java | 65 +++++++++++--- src/test/java/org/influxdb/InfluxDBTest.java | 87 +++++++++++++++++++ .../org/influxdb/MessagePackInfluxDBTest.java | 48 ++++++++++ 5 files changed, 259 insertions(+), 19 deletions(-) diff --git a/FAQ.md b/FAQ.md index c15207722..651ebae3d 100644 --- a/FAQ.md +++ b/FAQ.md @@ -8,6 +8,7 @@ - [If there is an error during this background process, is it propagated to the rest of the client ?](#if-there-is-an-error-during-this-background-process-is-it-propagated-to-the-rest-of-the-client-) - [How the client responds to concurrent write backpressure from server ?](#how-the-client-responds-to-concurrent-write-backpressure-from-server-) - [Is there a way to tell that all query chunks have arrived ?](#is-there-a-way-to-tell-that-all-query-chunks-have-arrived-) +- [Is there a way to tell the system to stop sending more 
chunks once I've found what I'm looking for ?](#is-there-a-way-to-tell-the-system-to-stop-sending-more-chunks-once-ive-found-what-im-looking-for-) ## Security @@ -66,12 +67,28 @@ So in case the number of write requests exceeds Concurrent write setting at serv Yes, there is __onComplete__ action that is invoked after successfully end of stream. ```java influxDB.query(new Query("SELECT * FROM disk", "telegraf"), 10_000, - queryResult -> { - System.out.println("result = " + queryResult); - }, - () -> { - System.out.println("The query successfully finished."); - }); + queryResult -> { + System.out.println("result = " + queryResult); + }, + () -> { + System.out.println("The query successfully finished."); + }); +``` + +## Is there a way to tell the system to stop sending more chunks once I've found what I'm looking for ? +Yes, there is __onNext__ bi-consumer with capability to discontinue a streaming query. +```java +influxDB.query(new Query("SELECT * FROM disk", "telegraf"), 10_000, (cancellable, queryResult) -> { + + // found what I'm looking for ? + if (foundRequest(queryResult)) { + // yes => cancel query + cancellable.cancel(); + } + + // no => process next result + processResult(queryResult); +}); ``` ## Is default config security setup TLS 1.2 ? diff --git a/src/main/java/org/influxdb/InfluxDB.java b/src/main/java/org/influxdb/InfluxDB.java index 22c25dd40..85ab1af4f 100644 --- a/src/main/java/org/influxdb/InfluxDB.java +++ b/src/main/java/org/influxdb/InfluxDB.java @@ -5,6 +5,7 @@ import org.influxdb.dto.Pong; import org.influxdb.dto.Query; import org.influxdb.dto.QueryResult; +import retrofit2.Call; import java.util.List; import java.util.concurrent.ThreadFactory; @@ -103,6 +104,28 @@ public enum ResponseFormat { /** application/x-msgpack format. */ MSGPACK } + + /** + * A cancelable allows to discontinue a streaming query. + */ + public interface Cancellable { + + /** + * Cancel the streaming query call. 
+ * + * @see Call#cancel() + */ + void cancel(); + + /** + * Return {@code true} if the {@link Cancellable#cancel()} was called. + * + * @return {@code true} if the {@link Cancellable#cancel()} was called + * @see Call#isCanceled() + */ + boolean isCanceled(); + } + /** * Set the loglevel which is used for REST related actions. * @@ -456,6 +479,18 @@ public void write(final String database, final String retentionPolicy, */ public void query(Query query, int chunkSize, Consumer onNext); + /** + * Execute a streaming query against a database. + * + * @param query + * the query to execute. + * @param chunkSize + * the number of QueryResults to process in one chunk. + * @param onNext + * the consumer to invoke for each received QueryResult; with capability to discontinue a streaming query + */ + public void query(Query query, int chunkSize, BiConsumer onNext); + /** * Execute a streaming query against a database. * @@ -470,6 +505,20 @@ public void write(final String database, final String retentionPolicy, */ public void query(Query query, int chunkSize, Consumer onNext, Runnable onComplete); + /** + * Execute a streaming query against a database. + * + * @param query + * the query to execute. + * @param chunkSize + * the number of QueryResults to process in one chunk. + * @param onNext + * the consumer to invoke for each received QueryResult; with capability to discontinue a streaming query + * @param onComplete + * the onComplete to invoke for successfully end of stream + */ + public void query(Query query, int chunkSize, BiConsumer onNext, Runnable onComplete); + /** * Execute a query against a database. 
* diff --git a/src/main/java/org/influxdb/impl/InfluxDBImpl.java b/src/main/java/org/influxdb/impl/InfluxDBImpl.java index 867ce1450..1b2a7f483 100644 --- a/src/main/java/org/influxdb/impl/InfluxDBImpl.java +++ b/src/main/java/org/influxdb/impl/InfluxDBImpl.java @@ -48,6 +48,7 @@ import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Iterator; import java.util.Collections; import java.util.List; import java.util.concurrent.Executors; @@ -77,7 +78,7 @@ public class InfluxDBImpl implements InfluxDB { * This static constant holds the http logging log level expected in DEBUG mode * It is set by System property {@code org.influxdb.InfluxDB.logLevel}. * - * @see org.influxdb.impl.LOG_LEVEL_PROPERTY + * @see org.influxdb.InfluxDB#LOG_LEVEL_PROPERTY */ private static final LogLevel LOG_LEVEL = LogLevel.parseLogLevel(System.getProperty(LOG_LEVEL_PROPERTY)); @@ -570,12 +571,29 @@ public void query(final Query query, final int chunkSize, final Consumer { }); } + /** + * {@inheritDoc} + */ + @Override + public void query(final Query query, final int chunkSize, final BiConsumer onNext) { + query(query, chunkSize, onNext, () -> { }); + } + /** * {@inheritDoc} */ @Override public void query(final Query query, final int chunkSize, final Consumer onNext, final Runnable onComplete) { + query(query, chunkSize, (cancellable, queryResult) -> onNext.accept(queryResult), onComplete); + } + + /** + * {@inheritDoc} + */ + @Override + public void query(final Query query, final int chunkSize, final BiConsumer onNext, + final Runnable onComplete) { Call call = null; if (query instanceof BoundParameterQuery) { BoundParameterQuery boundParameterQuery = (BoundParameterQuery) query; @@ -588,10 +606,23 @@ public void query(final Query query, final int chunkSize, final Consumer() { @Override public void onResponse(final Call call, final Response response) { + + Cancellable cancellable = new Cancellable() { + @Override + public 
void cancel() { + call.cancel(); + } + + @Override + public boolean isCanceled() { + return call.isCanceled(); + } + }; + try { if (response.isSuccessful()) { ResponseBody chunkedBody = response.body(); - chunkProccesor.process(chunkedBody, onNext, onComplete); + chunkProccesor.process(chunkedBody, cancellable, onNext, onComplete); } else { // REVIEW: must be handled consistently with IOException. ResponseBody errorBody = response.errorBody(); @@ -602,7 +633,7 @@ public void onResponse(final Call call, final Response consumer, Runnable onComplete) throws IOException; + void process(ResponseBody chunkedBody, Cancellable cancellable, + BiConsumer consumer, Runnable onComplete) throws IOException; } private class MessagePackChunkProccesor implements ChunkProccesor { @Override - public void process(final ResponseBody chunkedBody, final Consumer consumer, final Runnable onComplete) + public void process(final ResponseBody chunkedBody, final Cancellable cancellable, + final BiConsumer consumer, final Runnable onComplete) throws IOException { MessagePackTraverser traverser = new MessagePackTraverser(); try (InputStream is = chunkedBody.byteStream()) { - for (QueryResult result : traverser.traverse(is)) { - consumer.accept(result); + for (Iterator it = traverser.traverse(is).iterator(); it.hasNext() && !cancellable.isCanceled();) { + QueryResult result = it.next(); + consumer.accept(cancellable, result); } } - onComplete.run(); + if (!cancellable.isCanceled()) { + onComplete.run(); + } } } @@ -886,21 +922,24 @@ public JSONChunkProccesor(final JsonAdapter adapter) { } @Override - public void process(final ResponseBody chunkedBody, final Consumer consumer, final Runnable onComplete) + public void process(final ResponseBody chunkedBody, final Cancellable cancellable, + final BiConsumer consumer, final Runnable onComplete) throws IOException { try { BufferedSource source = chunkedBody.source(); - while (true) { + while (!cancellable.isCanceled()) { QueryResult result = 
adapter.fromJson(source); if (result != null) { - consumer.accept(result); + consumer.accept(cancellable, result); } } } catch (EOFException e) { QueryResult queryResult = new QueryResult(); queryResult.setError("DONE"); - consumer.accept(queryResult); - onComplete.run(); + consumer.accept(cancellable, queryResult); + if (!cancellable.isCanceled()) { + onComplete.run(); + } } finally { chunkedBody.close(); } diff --git a/src/test/java/org/influxdb/InfluxDBTest.java b/src/test/java/org/influxdb/InfluxDBTest.java index a2d36c3e0..6d6a2b3e8 100644 --- a/src/test/java/org/influxdb/InfluxDBTest.java +++ b/src/test/java/org/influxdb/InfluxDBTest.java @@ -36,6 +36,8 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.LongAdder; +import java.util.function.BiConsumer; import java.util.function.Consumer; /** @@ -780,6 +782,11 @@ public void accept(QueryResult result) { Assertions.assertNotNull(result); System.out.println(result); Assertions.assertEquals(1, result.getResults().get(0).getSeries().get(0).getValues().size()); + + result = queue.poll(20, TimeUnit.SECONDS); + Assertions.assertNotNull(result); + System.out.println(result); + Assertions.assertEquals("DONE", result.getError()); } /** @@ -876,6 +883,86 @@ public void testChunkingFailOnComplete() throws InterruptedException { Assertions.assertFalse(await, "The onComplete action arrive!"); } + @Test + public void testChunkingCancelQuery() throws InterruptedException { + if (this.influxDB.version().startsWith("0.") || this.influxDB.version().startsWith("1.0")) { + // do not test version 0.13 and 1.0 + return; + } + + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + BatchPoints batchPoints = BatchPoints.database(dbName).retentionPolicy(rp).build(); + for (int i = 0; i < 10; i++) 
+ { + Point point = Point.measurement("disk") + .tag("atag", "a") + .addField("used", 60L + (i * 10)) + .addField("free", 1L + i) + .time(i, TimeUnit.SECONDS) + .build(); + + batchPoints.point(point); + } + + Assertions.assertEquals(batchPoints.getPoints().size(), 10); + this.influxDB.write(batchPoints); + Thread.sleep(2000); + + LongAdder chunkCount = new LongAdder(); + + Query query = new Query("SELECT * FROM disk", dbName); + this.influxDB.query(query, 2, (cancellable, queryResult) -> { + + chunkCount.increment(); + + // after three chunks stop stream ("free" field == 5) + Number free = (Number) queryResult.getResults().get(0).getSeries().get(0).getValues().get(0).get(2); + if (free.intValue() == 5) { + + cancellable.cancel(); + } + }); + + Thread.sleep(5_000); + + Assertions.assertEquals(3, chunkCount.intValue()); + } + + @Test + public void testChunkingCancelQueryOnComplete() throws InterruptedException { + + if (this.influxDB.version().startsWith("0.") || this.influxDB.version().startsWith("1.0")) { + // do not test version 0.13 and 1.0 + return; + } + + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + BatchPoints batchPoints = BatchPoints.database(dbName).retentionPolicy(rp).build(); + Point point1 = Point.measurement("disk").tag("atag", "a").addField("used", 60L).addField("free", 1L).build(); + Point point2 = Point.measurement("disk").tag("atag", "b").addField("used", 70L).addField("free", 2L).build(); + Point point3 = Point.measurement("disk").tag("atag", "c").addField("used", 80L).addField("free", 3L).build(); + batchPoints.point(point1); + batchPoints.point(point2); + batchPoints.point(point3); + this.influxDB.write(batchPoints); + + CountDownLatch countDownLatch = new CountDownLatch(1); + + Thread.sleep(2000); + Query query = new Query("SELECT * FROM disk", dbName); + this.influxDB.query(query, 2, (cancellable, queryResult) 
-> cancellable.cancel(), countDownLatch::countDown); + + Thread.sleep(2000); + this.influxDB.deleteDatabase(dbName); + + boolean await = countDownLatch.await(5, TimeUnit.SECONDS); + Assertions.assertFalse(await, "The onComplete action arrive!"); + } + @Test public void testFlushPendingWritesWhenBatchingEnabled() { String dbName = "flush_tests_" + System.currentTimeMillis(); diff --git a/src/test/java/org/influxdb/MessagePackInfluxDBTest.java b/src/test/java/org/influxdb/MessagePackInfluxDBTest.java index 127983f87..adf3e016a 100644 --- a/src/test/java/org/influxdb/MessagePackInfluxDBTest.java +++ b/src/test/java/org/influxdb/MessagePackInfluxDBTest.java @@ -8,7 +8,10 @@ import java.time.Instant; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; import org.influxdb.InfluxDB.ResponseFormat; import org.influxdb.dto.BatchPoints; @@ -207,6 +210,51 @@ public void testWriteRecordsWithPrecision() throws Exception { this.influxDB.deleteDatabase(dbName); } + @Override + @Test + public void testChunking() throws InterruptedException { + if (this.influxDB.version().startsWith("0.") || this.influxDB.version().startsWith("1.0")) { + // do not test version 0.13 and 1.0 + return; + } + String dbName = "write_unittest_" + System.currentTimeMillis(); + this.influxDB.createDatabase(dbName); + String rp = TestUtils.defaultRetentionPolicy(this.influxDB.version()); + BatchPoints batchPoints = BatchPoints.database(dbName).retentionPolicy(rp).build(); + Point point1 = Point.measurement("disk").tag("atag", "a").addField("used", 60L).addField("free", 1L).build(); + Point point2 = Point.measurement("disk").tag("atag", "b").addField("used", 70L).addField("free", 2L).build(); + Point point3 = Point.measurement("disk").tag("atag", "c").addField("used", 80L).addField("free", 3L).build(); + batchPoints.point(point1); + 
batchPoints.point(point2); + batchPoints.point(point3); + this.influxDB.write(batchPoints); + + Thread.sleep(2000); + final BlockingQueue queue = new LinkedBlockingQueue<>(); + Query query = new Query("SELECT * FROM disk", dbName); + this.influxDB.query(query, 2, new Consumer() { + @Override + public void accept(QueryResult result) { + queue.add(result); + }}); + + Thread.sleep(2000); + this.influxDB.deleteDatabase(dbName); + + QueryResult result = queue.poll(20, TimeUnit.SECONDS); + Assertions.assertNotNull(result); + System.out.println(result); + Assertions.assertEquals(2, result.getResults().get(0).getSeries().get(0).getValues().size()); + + result = queue.poll(20, TimeUnit.SECONDS); + Assertions.assertNotNull(result); + System.out.println(result); + Assertions.assertEquals(1, result.getResults().get(0).getSeries().get(0).getValues().size()); + + result = queue.poll(5, TimeUnit.SECONDS); + Assertions.assertNull(result); + } + @Test public void testInfluxDBVersionChecking() throws InterruptedException, IOException { From 636e2adcb8620f69e8d6cc49144cec51b25ccd1a Mon Sep 17 00:00:00 2001 From: Jakub Bednar Date: Fri, 7 Sep 2018 11:58:05 +0200 Subject: [PATCH 299/745] Updated CHANGELOG.md --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a9c5bc3d1..364eabd5a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,8 @@ - Support for Basic Authentication [PR #492](https://github.com/influxdata/influxdb-java/pull/492) - Added possibility to reuse client as a core part of [influxdb-java-reactive](https://github.com/bonitoo-io/influxdb-java-reactive) client [PR #493](https://github.com/influxdata/influxdb-java/pull/493) - Retry capability for writing of BatchPoints [PR #503](https://github.com/influxdata/influxdb-java/issues/503) +- Added `BiConsumer` with capability to discontinue a streaming query [Issue #515](https://github.com/influxdata/influxdb-java/issues/515) +- Added `onComplete` action that is invoked after 
successfully end of streaming query [Issue #515](https://github.com/influxdata/influxdb-java/issues/515) ## 2.12 [2018-07-31] From b56f1faf649676274ff26fcbe379d1bbfcefe40a Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Wed, 12 Sep 2018 12:48:32 +0200 Subject: [PATCH 300/745] Update CHANGELOG.md with #517 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 364eabd5a..7ef829394 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ - UDP target host address is cached [PR #502](https://github.com/influxdata/influxdb-java/issues/502) - Error messages from server not parsed correctly when using msgpack [PR #506](https://github.com/influxdata/influxdb-java/issues/506) - Response body must be closed properly in case of JSON response [PR #514](https://github.com/influxdata/influxdb-java/issues/514) +- Time is serialized not consistently in MsgPack and Json, missing millis and nanos in MsgPack[PR #517](https://github.com/influxdata/influxdb-java/issues/517) ### Features From 2fb54cffff8ced5a2868b664f34c57f77ce197ca Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Wed, 12 Sep 2018 13:23:28 +0200 Subject: [PATCH 301/745] [maven-release-plugin] prepare release influxdb-java-2.13 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 0662dbb3e..3def128bc 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.13-SNAPSHOT + 2.13 influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -28,7 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git git@github.com:influxdata/influxdb-java.git - HEAD + influxdb-java-2.13 From 77e7b86bd07ad2bd52b7f734b9e1f71db7966d32 Mon Sep 17 00:00:00 2001 From: Tomas Klapka Date: Wed, 12 Sep 2018 13:23:37 +0200 Subject: [PATCH 302/745] [maven-release-plugin] prepare for next development iteration --- pom.xml | 4 ++-- 1 
file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 3def128bc..de2d29f86 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.influxdb influxdb-java jar - 2.13 + 2.14-SNAPSHOT influxdb java bindings Java API to access the InfluxDB REST API http://www.influxdb.org @@ -28,7 +28,7 @@ scm:git:git@github.com:influxdata/influxdb-java.git scm:git:git@github.com:influxdata/influxdb-java.git git@github.com:influxdata/influxdb-java.git - influxdb-java-2.13 + HEAD From ba0294117905c778b0afac971d7678b14e1627a9 Mon Sep 17 00:00:00 2001 From: ivankudibal Date: Wed, 12 Sep 2018 18:32:06 +0200 Subject: [PATCH 303/745] update CHANGELOG, add release date --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7ef829394..95e2d3fc6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## 2.13 [unreleased] +## 2.13 [2018-09-12] ### Fixes - MessagePack queries: Exception during parsing InfluxDB version [macOS] [PR #487](https://github.com/influxdata/influxdb-java/issues/487) From 05f2a039781c625835b75449a3a7d0b1933f0dcc Mon Sep 17 00:00:00 2001 From: gkatzioura Date: Thu, 13 Sep 2018 21:37:19 +0100 Subject: [PATCH 304/745] Feature/sub query (#1) * Change in the format * Added interfaces for basic functionality * Setting the selection core inside the selection query * Query mechanism operate on core * Core implementations do the job * Simplified from clause * Changed query creation * Formatted latest changes * Added subquery successfully * Supporting sub queries * Removed access to public constructors * Formatting * Removed generics where not needed * Removed unecessary generics * Removed unnecessary diamond expression * Simplified construction of select query * Changed select query * Added simplified subquery creation * Added simplified subquery creation * Changed query builder * Fixed formatting * Moving to subquery * More fun with diamonds * Generified sub query 
functionality * Added influxdb query builder * Minor changes * Added flexibility on the query building * Generified subqueries * Sub query simplification * Added sub query * Added sub query feature * Added test for subquery * Added query string * Added support for slimit * Added tz support * Added timezone test * Added regex on clause * Added formatting * Added subqueries --- .../org/influxdb/querybuilder/Appender.java | 4 + .../org/influxdb/querybuilder/BuiltQuery.java | 26 +- .../querybuilder/BuiltQueryDecorator.java | 54 ++++- .../querybuilder/QueryStringBuilder.java | 8 + .../org/influxdb/querybuilder/Select.java | 228 +----------------- .../influxdb/querybuilder/SelectCoreImpl.java | 198 +++++++++++++++ .../querybuilder/SelectDecorator.java | 52 ++++ .../querybuilder/SelectQueryImpl.java | 94 ++++++++ .../querybuilder/SelectSubQueryImpl.java | 94 ++++++++ .../querybuilder/SelectWithSubquery.java | 4 + .../org/influxdb/querybuilder/Selection.java | 120 ++------- .../querybuilder/SelectionCoreImpl.java | 141 +++++++++++ .../querybuilder/SelectionQueryImpl.java | 114 +++++++++ .../querybuilder/SelectionSubQueryImpl.java | 121 ++++++++++ .../org/influxdb/querybuilder/SubQuery.java | 19 ++ .../org/influxdb/querybuilder/TimeZone.java | 23 ++ .../java/org/influxdb/querybuilder/Where.java | 26 ++ .../influxdb/querybuilder/WhereCoreImpl.java | 75 ++++++ .../influxdb/querybuilder/WhereNested.java | 41 ++++ .../influxdb/querybuilder/WhereQueryImpl.java | 77 ++++++ .../querybuilder/WhereSubQueryImpl.java | 104 ++++++++ .../influxdb/querybuilder/WithSubquery.java | 6 + .../querybuilder/clauses/NestedClause.java | 24 +- .../querybuilder/clauses/RegexClause.java | 8 +- .../clauses/SelectRegexClause.java | 13 + .../influxdb/querybuilder/BuiltQueryTest.java | 77 +++++- .../SelectionSubQueryImplTest.java | 205 ++++++++++++++++ 27 files changed, 1599 insertions(+), 357 deletions(-) create mode 100644 src/main/java/org/influxdb/querybuilder/QueryStringBuilder.java create mode 
100644 src/main/java/org/influxdb/querybuilder/SelectCoreImpl.java create mode 100644 src/main/java/org/influxdb/querybuilder/SelectDecorator.java create mode 100644 src/main/java/org/influxdb/querybuilder/SelectQueryImpl.java create mode 100644 src/main/java/org/influxdb/querybuilder/SelectSubQueryImpl.java create mode 100644 src/main/java/org/influxdb/querybuilder/SelectWithSubquery.java create mode 100644 src/main/java/org/influxdb/querybuilder/SelectionCoreImpl.java create mode 100644 src/main/java/org/influxdb/querybuilder/SelectionQueryImpl.java create mode 100644 src/main/java/org/influxdb/querybuilder/SelectionSubQueryImpl.java create mode 100644 src/main/java/org/influxdb/querybuilder/SubQuery.java create mode 100644 src/main/java/org/influxdb/querybuilder/TimeZone.java create mode 100644 src/main/java/org/influxdb/querybuilder/Where.java create mode 100644 src/main/java/org/influxdb/querybuilder/WhereCoreImpl.java create mode 100644 src/main/java/org/influxdb/querybuilder/WhereNested.java create mode 100644 src/main/java/org/influxdb/querybuilder/WhereQueryImpl.java create mode 100644 src/main/java/org/influxdb/querybuilder/WhereSubQueryImpl.java create mode 100644 src/main/java/org/influxdb/querybuilder/WithSubquery.java create mode 100644 src/main/java/org/influxdb/querybuilder/clauses/SelectRegexClause.java create mode 100644 src/test/java/org/influxdb/querybuilder/SelectionSubQueryImplTest.java diff --git a/src/main/java/org/influxdb/querybuilder/Appender.java b/src/main/java/org/influxdb/querybuilder/Appender.java index 9aacc1a07..cc61b2b03 100644 --- a/src/main/java/org/influxdb/querybuilder/Appender.java +++ b/src/main/java/org/influxdb/querybuilder/Appender.java @@ -3,6 +3,7 @@ import java.util.List; import java.util.regex.Pattern; import org.influxdb.querybuilder.clauses.ConjunctionClause; +import org.influxdb.querybuilder.clauses.SelectRegexClause; public final class Appender { @@ -89,6 +90,9 @@ public static StringBuilder appendName(final 
Object name, final StringBuilder st appendName((String) name, stringBuilder); } else if (name instanceof Column) { appendName(((Column) name).getName(), stringBuilder); + } else if (name instanceof SelectRegexClause) { + SelectRegexClause selectRegexClause = (SelectRegexClause) name; + selectRegexClause.appendTo(stringBuilder); } else if (name instanceof Function) { Function functionCall = (Function) name; stringBuilder.append(functionCall.getName()).append('('); diff --git a/src/main/java/org/influxdb/querybuilder/BuiltQuery.java b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java index 51c88751c..b9fc43787 100644 --- a/src/main/java/org/influxdb/querybuilder/BuiltQuery.java +++ b/src/main/java/org/influxdb/querybuilder/BuiltQuery.java @@ -15,7 +15,7 @@ import org.influxdb.querybuilder.clauses.RegexClause; import org.influxdb.querybuilder.clauses.SimpleClause; -public abstract class BuiltQuery extends Query { +public abstract class BuiltQuery extends Query implements QueryStringBuilder { public BuiltQuery(final String database) { super(null, database); @@ -25,17 +25,15 @@ public BuiltQuery(final String database, final boolean requiresPost) { super(null, database, requiresPost); } - abstract StringBuilder buildQueryString(); - - static StringBuilder addSemicolonIfNeeded(final StringBuilder stringBuilder) { - int length = moveToEndOfText(stringBuilder); + static StringBuilder addSemicolonIfMissing(final StringBuilder stringBuilder) { + int length = trimLast(stringBuilder); if (length == 0 || stringBuilder.charAt(length - 1) != ';') { stringBuilder.append(';'); } return stringBuilder; } - private static int moveToEndOfText(final StringBuilder stringBuilder) { + static int trimLast(final StringBuilder stringBuilder) { int length = stringBuilder.length(); while (length > 0 && stringBuilder.charAt(length - 1) <= ' ') { length -= 1; @@ -49,7 +47,7 @@ private static int moveToEndOfText(final StringBuilder stringBuilder) { @Override public String getCommand() { 
StringBuilder sb = buildQueryString(); - addSemicolonIfNeeded(sb); + addSemicolonIfMissing(sb); return sb.toString(); } @@ -67,16 +65,16 @@ public static final class QueryBuilder { private QueryBuilder() { } - public static Select.Builder select(final String... columns) { + public static SelectionQueryImpl select(final String... columns) { return select((Object[]) columns); } - public static Select.Builder select(final Object... columns) { - return new Select.Builder(Arrays.asList(columns)); - } - - public static Selection select() { - return new Selection(); + public static SelectionQueryImpl select(final Object... columns) { + WhereCoreImpl whereCore = new WhereCoreImpl(); + SelectCoreImpl selectCore = + new SelectCoreImpl(null, Arrays.asList(columns), false, whereCore); + whereCore.setStatement(selectCore); + return new SelectionQueryImpl(new SelectionCoreImpl()); } public static Clause eq(final String name, final Object value) { diff --git a/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java b/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java index c5cbe1b92..414d8e2d2 100644 --- a/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java +++ b/src/main/java/org/influxdb/querybuilder/BuiltQueryDecorator.java @@ -1,11 +1,17 @@ package org.influxdb.querybuilder; -public class BuiltQueryDecorator extends BuiltQuery { +import org.influxdb.querybuilder.clauses.Clause; - T query; +public abstract class BuiltQueryDecorator extends BuiltQuery + implements Select { - BuiltQueryDecorator(final T query) { + SelectQueryImpl query; + + public BuiltQueryDecorator() { super(null); + } + + public void setQuery(final T query) { this.query = query; } @@ -15,12 +21,52 @@ public String getCommand() { } @Override - StringBuilder buildQueryString() { + public StringBuilder buildQueryString() { return query.buildQueryString(); } + @Override + public StringBuilder buildQueryString(final StringBuilder stringBuilder) { + return 
query.buildQueryString(stringBuilder); + } + @Override public String getDatabase() { return query.getDatabase(); } + + @Override + public Where where() { + return query.where(); + } + + @Override + public Where where(final Clause clause) { + return query.where(clause); + } + + @Override + public Where where(final String text) { + return query.where(text); + } + + @Override + public SelectQueryImpl orderBy(final Ordering ordering) { + return query.orderBy(ordering); + } + + @Override + public SelectQueryImpl groupBy(final Object... columns) { + return query.groupBy(columns); + } + + @Override + public SelectQueryImpl limit(final int limit) { + return query.limit(limit); + } + + @Override + public SelectQueryImpl limit(final int limit, final long offSet) { + return query.limit(limit, offSet); + } } diff --git a/src/main/java/org/influxdb/querybuilder/QueryStringBuilder.java b/src/main/java/org/influxdb/querybuilder/QueryStringBuilder.java new file mode 100644 index 000000000..ef766728e --- /dev/null +++ b/src/main/java/org/influxdb/querybuilder/QueryStringBuilder.java @@ -0,0 +1,8 @@ +package org.influxdb.querybuilder; + +interface QueryStringBuilder { + + StringBuilder buildQueryString(final StringBuilder stringBuilder); + + StringBuilder buildQueryString(); +} diff --git a/src/main/java/org/influxdb/querybuilder/Select.java b/src/main/java/org/influxdb/querybuilder/Select.java index 0785b0e12..88058e65a 100644 --- a/src/main/java/org/influxdb/querybuilder/Select.java +++ b/src/main/java/org/influxdb/querybuilder/Select.java @@ -1,232 +1,26 @@ package org.influxdb.querybuilder; import org.influxdb.querybuilder.clauses.Clause; -import org.influxdb.querybuilder.clauses.RawTextClause; -import org.influxdb.querybuilder.clauses.ConjunctionClause; -import org.influxdb.querybuilder.clauses.AndConjunction; -import org.influxdb.querybuilder.clauses.OrConjunction; -import org.influxdb.querybuilder.clauses.NestedClause; -import static 
org.influxdb.querybuilder.Appender.appendName; -import static org.influxdb.querybuilder.Appender.joinAndAppend; -import static org.influxdb.querybuilder.Appender.joinAndAppendNames; +public interface Select { -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; + T where(); -public class Select extends BuiltQuery { + T where(final Clause clause); - private final String table; - private final boolean isDistinct; - private final List columns; - private final Where where; - private Ordering ordering; - private List groupByColumns; - private Integer limit; - private Long offSet; + T where(final String text); - Select( - final String database, - final String table, - final List columns, - final boolean isDistinct, - final boolean requiresPost) { - super(database, requiresPost); - this.table = table; - this.columns = columns; - this.isDistinct = isDistinct; - this.where = new Where(this); - } + T orderBy(final Ordering ordering); - @Override - StringBuilder buildQueryString() { - StringBuilder builder = new StringBuilder(); + T groupBy(final Object... 
columns); - builder.append("SELECT "); + T limit(final int limit); - if (isDistinct) { - if (columns.size() > 1) { - throw new IllegalStateException("DISTINCT function can only be used with one column"); - } - } + T limit(final int limit, final long offSet); - if (columns == null) { - builder.append('*'); - } else { - joinAndAppendNames(builder, columns); - } - builder.append(" FROM "); + T sLimit(final int sLimit); - appendName(table, builder); + T sLimit(final int sLimit, final long sOffSet); - if (!where.clauses.isEmpty()) { - builder.append(" WHERE "); - joinAndAppend(builder, where.clauses); - } - - if (groupByColumns != null) { - builder.append(" GROUP BY "); - joinAndAppendNames(builder, groupByColumns); - } - - if (ordering != null) { - builder.append(" ORDER BY "); - joinAndAppend(builder, ",", Collections.singletonList(ordering)); - } - - if (limit != null) { - builder.append(" LIMIT ").append(limit); - } - - if (offSet != null) { - builder.append(" OFFSET ").append(offSet); - } - - return builder; - } - - public Where where() { - return where; - } - - public Where where(final Clause clause) { - return where.and(clause); - } - - public Where where(final String text) { - return where.and(new RawTextClause(text)); - } - - public Select orderBy(final Ordering ordering) { - - this.ordering = ordering; - return this; - } - - public Select groupBy(final Object... 
columns) { - this.groupByColumns = Arrays.asList(columns); - return this; - } - - public Select limit(final int limit) { - if (limit <= 0) { - throw new IllegalArgumentException("Invalid LIMIT value, must be strictly positive"); - } - - if (this.limit != null) { - throw new IllegalStateException("A LIMIT value has already been provided"); - } - - this.limit = limit; - return this; - } - - public Select limit(final int limit, final long offSet) { - if (limit <= 0 || offSet <= 0) { - throw new IllegalArgumentException( - "Invalid LIMIT and OFFSET Value, must be strictly positive"); - } - - this.limit = limit; - this.offSet = offSet; - return this; - } - - public static class Where extends BuiltQueryDecorator