WARNING: This class maintains an internal state in terms of {@link
+ * java.util.LinkedHashMap} and {@link java.util.LinkedHashSet} which gets updated on every method
+ * call performing CRUD operations to record the mutations. Since {@link java.util.LinkedHashMap}
+ * is not thread safe as per its documentation, this class should not be treated as thread safe
+ * either.
*/
+@NotThreadSafe
public interface Batch extends DatastoreBatchWriter {
interface Response {
diff --git a/google-cloud-datastore/src/main/java/com/google/cloud/datastore/DatastoreBatchWriter.java b/google-cloud-datastore/src/main/java/com/google/cloud/datastore/DatastoreBatchWriter.java
index d03d90af8..db4bd3179 100644
--- a/google-cloud-datastore/src/main/java/com/google/cloud/datastore/DatastoreBatchWriter.java
+++ b/google-cloud-datastore/src/main/java/com/google/cloud/datastore/DatastoreBatchWriter.java
@@ -17,11 +17,20 @@
package com.google.cloud.datastore;
import java.util.List;
+import javax.annotation.concurrent.NotThreadSafe;
/**
* An interface to represent a batch of write operations. All write operation for a batch writer
* will be applied to the Datastore in one RPC call.
+ *
+ * <p>WARNING: This class maintains an internal state in terms of {@link
+ * java.util.LinkedHashMap} and {@link java.util.LinkedHashSet} which gets updated on every method
+ * call performing CRUD operations to record the mutations. Since {@link java.util.LinkedHashMap}
+ * is not thread safe as per its documentation, this class should not be treated as thread safe
+ * either.
*/
+@NotThreadSafe
public interface DatastoreBatchWriter extends DatastoreWriter {
/**
diff --git a/google-cloud-datastore/src/main/java/com/google/cloud/datastore/Transaction.java b/google-cloud-datastore/src/main/java/com/google/cloud/datastore/Transaction.java
index 9880b4748..69c18d75c 100644
--- a/google-cloud-datastore/src/main/java/com/google/cloud/datastore/Transaction.java
+++ b/google-cloud-datastore/src/main/java/com/google/cloud/datastore/Transaction.java
@@ -19,14 +19,15 @@
import com.google.protobuf.ByteString;
import java.util.Iterator;
import java.util.List;
+import javax.annotation.concurrent.NotThreadSafe;
/**
* A Google cloud datastore transaction. Similar to {@link Batch} any write operation that is
* applied on a transaction will only be sent to the Datastore upon {@link #commit}. A call to
* {@link #rollback} will invalidate the transaction and discard the changes. Any read operation
* that is done by a transaction will be part of it and therefore a {@code commit} is guaranteed to
- * fail if an entity was modified outside of the transaction after it was read. Write operation on
- * this transaction will not be reflected by read operation (as the changes are only sent to the
+ * fail if an entity was modified outside the transaction after it was read. Write operation on this
+ * transaction will not be reflected by read operation (as the changes are only sent to the
* Datastore upon {@code commit}. A usage example:
*
* <pre>{@code
@@ -52,7 +53,14 @@
*
* @see Google Cloud
* Datastore transactions
+ * <p>WARNING: This class maintains an internal state in terms of {@link
+ * java.util.LinkedHashMap} and {@link java.util.LinkedHashSet} which gets updated on every
+ * method call performing CRUD operations to record the mutations. Since {@link
+ * java.util.LinkedHashMap} is not thread safe as per its documentation, this class should not be
+ * treated as thread safe either.
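+ *
+ * <p>As an illustrative sketch only (assuming a shared {@code Datastore} client named
+ * {@code datastore}), each thread should therefore work with its own instance rather than
+ * sharing one:
+ *
+ * <pre>{@code
+ * Transaction txn = datastore.newTransaction(); // one instance per thread
+ * try {
+ *   // reads and writes on txn ...
+ *   txn.commit();
+ * } finally {
+ *   if (txn.isActive()) {
+ *     txn.rollback();
+ *   }
+ * }
+ * }</pre>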
*/
+@NotThreadSafe
public interface Transaction extends DatastoreBatchWriter, DatastoreReaderWriter {
interface Response {
diff --git a/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/ITDatastoreConceptsTest.java b/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/ITDatastoreConceptsTest.java
new file mode 100644
index 000000000..b8ebd277a
--- /dev/null
+++ b/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/ITDatastoreConceptsTest.java
@@ -0,0 +1,996 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.cloud.datastore.it;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import com.google.cloud.Timestamp;
+import com.google.cloud.datastore.Cursor;
+import com.google.cloud.datastore.Datastore;
+import com.google.cloud.datastore.DatastoreException;
+import com.google.cloud.datastore.DatastoreOptions;
+import com.google.cloud.datastore.Entity;
+import com.google.cloud.datastore.EntityQuery;
+import com.google.cloud.datastore.FullEntity;
+import com.google.cloud.datastore.IncompleteKey;
+import com.google.cloud.datastore.Key;
+import com.google.cloud.datastore.KeyFactory;
+import com.google.cloud.datastore.KeyQuery;
+import com.google.cloud.datastore.ListValue;
+import com.google.cloud.datastore.PathElement;
+import com.google.cloud.datastore.ProjectionEntity;
+import com.google.cloud.datastore.Query;
+import com.google.cloud.datastore.QueryResults;
+import com.google.cloud.datastore.ReadOption;
+import com.google.cloud.datastore.StringValue;
+import com.google.cloud.datastore.StructuredQuery;
+import com.google.cloud.datastore.StructuredQuery.CompositeFilter;
+import com.google.cloud.datastore.StructuredQuery.OrderBy;
+import com.google.cloud.datastore.StructuredQuery.PropertyFilter;
+import com.google.cloud.datastore.Transaction;
+import com.google.cloud.datastore.testing.RemoteDatastoreHelper;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Iterators;
+import com.google.datastore.v1.TransactionOptions;
+import com.google.datastore.v1.TransactionOptions.ReadOnly;
+import java.time.LocalDateTime;
+import java.time.Month;
+import java.time.ZoneOffset;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/*
+ * Test created based on ConceptsTest to run against GraalVM checks
+ */
+public class ITDatastoreConceptsTest {
+ private static final RemoteDatastoreHelper HELPER = RemoteDatastoreHelper.create();
+ private static final DatastoreOptions OPTIONS = HELPER.getOptions();
+ private static final FullEntity<IncompleteKey> TEST_FULL_ENTITY = FullEntity.newBuilder().build();
+ private Datastore datastore;
+ private KeyFactory keyFactory;
+ private Key taskKey;
+ private Entity testEntity;
+ private Timestamp startDate;
+ private Timestamp endDate;
+ private Timestamp includedDate;
+
+ private static final String TASK_CONCEPTS = "TaskConcepts";
+
+ /**
+ * Initializes Datastore and cleans out any residual values. Also initializes global variables
+ * used for testing.
+ */
+ @Before
+ public void setUp() {
+ datastore = OPTIONS.getService();
+ StructuredQuery<Key> query = Query.newKeyQueryBuilder().build();
+ QueryResults<Key> result = datastore.run(query);
+ datastore.delete(Iterators.toArray(result, Key.class));
+ keyFactory = datastore.newKeyFactory().setKind(TASK_CONCEPTS);
+ taskKey = keyFactory.newKey("some-arbitrary-key");
+ testEntity = Entity.newBuilder(taskKey, TEST_FULL_ENTITY).build();
+ startDate =
+ Timestamp.ofTimeSecondsAndNanos(
+ LocalDateTime.of(1990, Month.JANUARY, 1, 1, 1, 1)
+ .toInstant(ZoneOffset.UTC)
+ .getEpochSecond(),
+ 0);
+ endDate =
+ Timestamp.ofTimeSecondsAndNanos(
+ LocalDateTime.of(2000, Month.JANUARY, 1, 1, 1, 1)
+ .toInstant(ZoneOffset.UTC)
+ .getEpochSecond(),
+ 0);
+ includedDate =
+ Timestamp.ofTimeSecondsAndNanos(
+ LocalDateTime.of(1999, Month.DECEMBER, 31, 1, 1, 1)
+ .toInstant(ZoneOffset.UTC)
+ .getEpochSecond(),
+ 0);
+ setUpQueryTests();
+ }
+
+ @After
+ public void tearDown() {
+ KeyQuery taskQuery = Query.newKeyQueryBuilder().setKind(TASK_CONCEPTS).build();
+ Key[] taskKeysToDelete = Iterators.toArray(datastore.run(taskQuery), Key.class);
+ datastore.delete(taskKeysToDelete);
+ }
+
+ private void assertValidKey(Key taskKey) {
+ datastore.put(Entity.newBuilder(taskKey, TEST_FULL_ENTITY).build());
+ }
+
+ private void assertValidEntity(Entity original) {
+ datastore.put(original);
+ assertEquals(original, datastore.get(original.getKey()));
+ }
+
+ private List<Entity> setUpBatchTests(Key taskKey1, Key taskKey2) {
+ Entity task1 =
+ Entity.newBuilder(taskKey1)
+ .set("category", "Personal")
+ .set("done", false)
+ .set("priority", 4)
+ .set("description", "Learn Cloud Datastore")
+ .build();
+ Entity task2 =
+ Entity.newBuilder(taskKey2)
+ .set("category", "Personal")
+ .set("done", false)
+ .set("priority", 5)
+ .set("description", "Integrate Cloud Datastore")
+ .build();
+ datastore.put(task1, task2);
+ return ImmutableList.of(task1, task2);
+ }
+
+ private void setUpQueryTests() {
+ Key taskKey =
+ datastore
+ .newKeyFactory()
+ .setKind(TASK_CONCEPTS)
+ .addAncestors(PathElement.of("TaskList", "default"))
+ .newKey("someTask");
+ datastore.put(
+ Entity.newBuilder(taskKey)
+ .set("category", "Personal")
+ .set("done", false)
+ .set("completed", false)
+ .set("priority", 4)
+ .set("created", includedDate)
+ .set("percent_complete", 10.0)
+ .set(
+ "description",
+ StringValue.newBuilder("Learn Cloud Datastore").setExcludeFromIndexes(true).build())
+ .set("tag", "fun", "l", "programming", "learn")
+ .build());
+ }
+
+ private void assertValidQuery(Query<?> query) {
+ QueryResults<?> results = datastore.run(query);
+ assertTrue(results.hasNext());
+ results.next();
+ assertFalse(results.hasNext());
+ }
+
+ private void assertInvalidQuery(Query<?> query) {
+ try {
+ datastore.run(query);
+ fail("should not reach here");
+ } catch (DatastoreException ignored) {
+ }
+ }
+
+ private List<Key> setUpTransferTests() {
+ KeyFactory keyFactory = datastore.newKeyFactory().setKind("People");
+ Key from = keyFactory.newKey("from");
+ Key to = keyFactory.newKey("to");
+ datastore.put(Entity.newBuilder(from).set("balance", 100).build());
+ datastore.put(Entity.newBuilder(to).set("balance", 0).build());
+ return ImmutableList.of(from, to);
+ }
+
+ private void assertSuccessfulTransfer(Key from, Key to) {
+ assertEquals(90, datastore.get(from).getLong("balance"));
+ assertEquals(10, datastore.get(to).getLong("balance"));
+ }
+
+ @Test
+ public void testIncompleteKey() {
+ KeyFactory keyFactory = datastore.newKeyFactory().setKind(TASK_CONCEPTS);
+ Key taskKey = datastore.allocateId(keyFactory.newKey());
+ assertValidKey(taskKey);
+ }
+
+ @Test
+ public void testNamedKey() {
+ Key taskKey = datastore.newKeyFactory().setKind(TASK_CONCEPTS).newKey("sampleTask");
+ assertValidKey(taskKey);
+ }
+
+ @Test
+ public void testKeyWithParent() {
+ Key taskKey =
+ datastore
+ .newKeyFactory()
+ .addAncestors(PathElement.of("TaskList", "default"))
+ .setKind(TASK_CONCEPTS)
+ .newKey("sampleTask");
+ assertValidKey(taskKey);
+ }
+
+ @Test
+ public void testKeyWithMultilevelParent() {
+ KeyFactory keyFactory =
+ datastore
+ .newKeyFactory()
+ .addAncestors(PathElement.of("User", "Alice"), PathElement.of("TaskList", "default"))
+ .setKind(TASK_CONCEPTS);
+ Key taskKey = keyFactory.newKey("sampleTask");
+ assertValidKey(taskKey);
+ }
+
+ @Test
+ public void testEntityWithParent() {
+ Key taskKey =
+ datastore
+ .newKeyFactory()
+ .addAncestors(PathElement.of("TaskList", "default"))
+ .setKind(TASK_CONCEPTS)
+ .newKey("sampleTask");
+ Entity task =
+ Entity.newBuilder(taskKey)
+ .set("category", "Personal")
+ .set("done", false)
+ .set("priority", 4)
+ .set("description", "Learn Cloud Datastore")
+ .build();
+ assertValidEntity(task);
+ }
+
+ @Test
+ public void testProperties() {
+ Entity task =
+ Entity.newBuilder(taskKey)
+ .set("category", "Personal")
+ .set("created", Timestamp.now())
+ .set("done", false)
+ .set("priority", 4)
+ .set("percent_complete", 10.0)
+ .set(
+ "description",
+ StringValue.newBuilder("Learn Cloud Datastore").setExcludeFromIndexes(true).build())
+ .build();
+ assertValidEntity(task);
+ }
+
+ @Test
+ public void testArrayValue() {
+ Entity task =
+ Entity.newBuilder(taskKey)
+ .set("tags", "fun", "programming")
+ .set("collaborators", ListValue.of("alice", "bob"))
+ .build();
+ assertValidEntity(task);
+ }
+
+ @Test
+ public void testBasicEntity() {
+ Key taskKey = datastore.newKeyFactory().setKind(TASK_CONCEPTS).newKey("sampleTask");
+ Entity task =
+ Entity.newBuilder(taskKey)
+ .set("category", "Personal")
+ .set("done", false)
+ .set("priority", 4)
+ .set("description", "Learn Cloud Datastore")
+ .build();
+ assertValidEntity(task);
+ }
+
+ @Test
+ public void testUpsert() {
+ Entity task = Entity.newBuilder(keyFactory.newKey("sampleTask")).build();
+ datastore.put(task);
+ assertEquals(task, datastore.get(task.getKey()));
+ }
+
+ @Test
+ public void testInsert() {
+ Key taskKey = datastore.add(FullEntity.newBuilder(keyFactory.newKey()).build()).getKey();
+ assertEquals(FullEntity.newBuilder(taskKey).build(), datastore.get(taskKey));
+ }
+
+ @Test
+ public void testLookup() {
+ datastore.put(testEntity);
+ Entity task = datastore.get(taskKey);
+ assertEquals(testEntity, task);
+ }
+
+ @Test
+ public void testUpdate() {
+ datastore.put(testEntity);
+ Entity task = Entity.newBuilder(datastore.get(taskKey)).set("priority", 5).build();
+ datastore.update(task);
+ assertEquals(task, datastore.get(taskKey));
+ }
+
+ @Test
+ public void testDelete() {
+ datastore.put(testEntity);
+ datastore.delete(taskKey);
+ assertNull(datastore.get(taskKey));
+ }
+
+ @Test
+ public void testBatchUpsert() {
+ FullEntity<IncompleteKey> task1 =
+ FullEntity.newBuilder(keyFactory.newKey())
+ .set("category", "Personal")
+ .set("done", false)
+ .set("priority", 4)
+ .set("description", "Learn Cloud Datastore")
+ .build();
+ FullEntity<IncompleteKey> task2 =
+ FullEntity.newBuilder(keyFactory.newKey())
+ .set("category", "Personal")
+ .set("done", false)
+ .set("priority", 5)
+ .set("description", "Integrate Cloud Datastore")
+ .build();
+ List<Entity> tasks = datastore.add(task1, task2);
+ Key taskKey1 = tasks.get(0).getKey();
+ Key taskKey2 = tasks.get(1).getKey();
+ assertEquals(Entity.newBuilder(taskKey1, task1).build(), datastore.get(taskKey1));
+ assertEquals(Entity.newBuilder(taskKey2, task2).build(), datastore.get(taskKey2));
+ }
+
+ @Test
+ public void testBatchLookup() {
+ Key taskKey1 = keyFactory.newKey(1);
+ Key taskKey2 = keyFactory.newKey(2);
+ List<Entity> expectedTasks = setUpBatchTests(taskKey1, taskKey2);
+ Iterator<Entity> tasks = datastore.get(taskKey1, taskKey2);
+ assertEquals(expectedTasks.get(0), tasks.next());
+ assertEquals(expectedTasks.get(1), tasks.next());
+ }
+
+ @Test
+ public void testBatchDelete() {
+ Key taskKey1 = keyFactory.newKey(1);
+ Key taskKey2 = keyFactory.newKey(2);
+ setUpBatchTests(taskKey1, taskKey2);
+ datastore.delete(taskKey1, taskKey2);
+ assertNull(datastore.get(taskKey1));
+ assertNull(datastore.get(taskKey2));
+ }
+
+ @Test
+ public void testBasicQuery() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(
+ CompositeFilter.and(
+ PropertyFilter.eq("done", false), PropertyFilter.ge("priority", 4)))
+ .setOrderBy(OrderBy.desc("priority"))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testRunQuery() {
+ Query query = Query.newEntityQueryBuilder().setKind(TASK_CONCEPTS).build();
+ QueryResults tasks = datastore.run(query);
+ assertNotNull(tasks.next());
+ assertFalse(tasks.hasNext());
+ }
+
+ @Test
+ public void testPropertyFilter() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.eq("done", false))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testCompositeFilter() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(
+ CompositeFilter.and(
+ PropertyFilter.eq("done", false), PropertyFilter.eq("priority", 4)))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testKeyFilter() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(StructuredQuery.PropertyFilter.gt("__key__", keyFactory.newKey("someTask")))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testAscendingSort() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setOrderBy(OrderBy.asc("created"))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testDescendingSort() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setOrderBy(OrderBy.desc("created"))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testMultiSort() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setOrderBy(OrderBy.desc("priority"), OrderBy.asc("created"))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testKindlessQuery() {
+ Key lastSeenKey = keyFactory.newKey("a");
+
+ Query query =
+ Query.newEntityQueryBuilder().setFilter(PropertyFilter.gt("__key__", lastSeenKey)).build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testAncestorQuery() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(
+ PropertyFilter.hasAncestor(
+ datastore.newKeyFactory().setKind("TaskList").newKey("default")))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testProjectionQuery() {
+ Query query =
+ Query.newProjectionEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setProjection("priority", "percent_complete")
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testRunProjectionQuery() {
+ Query<ProjectionEntity> query =
+ Query.newProjectionEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setProjection("priority", "percent_complete")
+ .build();
+ List<Long> priorities = new LinkedList<>();
+ List<Double> percentCompletes = new LinkedList<>();
+ QueryResults<ProjectionEntity> tasks = datastore.run(query);
+ while (tasks.hasNext()) {
+ ProjectionEntity task = tasks.next();
+ priorities.add(task.getLong("priority"));
+ percentCompletes.add(task.getDouble("percent_complete"));
+ }
+ assertEquals(ImmutableList.of(4L), priorities);
+ assertEquals(ImmutableList.of(10.0), percentCompletes);
+ }
+
+ @Test
+ public void testKeysOnlyQuery() {
+ Query query = Query.newKeyQueryBuilder().setKind(TASK_CONCEPTS).build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testDistinctOnQuery() {
+ Query query =
+ Query.newProjectionEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setProjection("category", "priority")
+ .setDistinctOn("category")
+ .setOrderBy(OrderBy.asc("category"), OrderBy.asc("priority"))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testArrayValueInequalityRange() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(
+ CompositeFilter.and(
+ PropertyFilter.gt("tag", "learn"), PropertyFilter.lt("tag", "math")))
+ .build();
+ QueryResults results = datastore.run(query);
+ assertFalse(results.hasNext());
+ }
+
+ @Test
+ public void testArrayValueEquality() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(
+ CompositeFilter.and(
+ PropertyFilter.eq("tag", "fun"), PropertyFilter.eq("tag", "programming")))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testInequalityRange() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(
+ CompositeFilter.and(
+ PropertyFilter.gt("created", startDate), PropertyFilter.lt("created", endDate)))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testInequalityInvalid() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(
+ CompositeFilter.and(
+ PropertyFilter.gt("created", startDate), PropertyFilter.gt("priority", 3)))
+ .build();
+ assertInvalidQuery(query);
+ }
+
+ @Test
+ public void testEqualAndInequalityRange() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(
+ CompositeFilter.and(
+ PropertyFilter.eq("priority", 4),
+ PropertyFilter.gt("created", startDate),
+ PropertyFilter.lt("created", endDate)))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testInequalitySort() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.gt("priority", 3))
+ .setOrderBy(OrderBy.asc("priority"), OrderBy.asc("created"))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testInequalitySortInvalidNotSame() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.gt("priority", 3))
+ .setOrderBy(OrderBy.asc("created"))
+ .build();
+ assertInvalidQuery(query);
+ }
+
+ @Test
+ public void testInequalitySortInvalidNotFirst() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.gt("priority", 3))
+ .setOrderBy(OrderBy.asc("created"), OrderBy.asc("priority"))
+ .build();
+ assertInvalidQuery(query);
+ }
+
+ @Test
+ public void testLimit() {
+ Query query = Query.newEntityQueryBuilder().setKind(TASK_CONCEPTS).setLimit(5).build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testCursorPaging() {
+ datastore.put(testEntity);
+ Cursor nextPageCursor = cursorPaging(1, null);
+ assertNotNull(nextPageCursor);
+ nextPageCursor = cursorPaging(1, nextPageCursor);
+ assertNotNull(nextPageCursor);
+ }
+
+ private Cursor cursorPaging(int pageSize, Cursor pageCursor) {
+ EntityQuery.Builder queryBuilder =
+ Query.newEntityQueryBuilder().setKind(TASK_CONCEPTS).setLimit(pageSize);
+ if (pageCursor != null) {
+ queryBuilder.setStartCursor(pageCursor);
+ }
+ QueryResults<Entity> tasks = datastore.run(queryBuilder.build());
+ while (tasks.hasNext()) {
+ Entity task = tasks.next();
+ // do something with the task
+ }
+ Cursor nextPageCursor = tasks.getCursorAfter();
+ return nextPageCursor;
+ }
+
+ @Test
+ public void testEventualConsistentQuery() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(
+ PropertyFilter.hasAncestor(
+ datastore.newKeyFactory().setKind("TaskList").newKey("default")))
+ .build();
+ datastore.run(query, ReadOption.eventualConsistency());
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testUnindexedPropertyQuery() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.eq("description", "A task description"))
+ .build();
+ QueryResults results = datastore.run(query);
+ assertFalse(results.hasNext());
+ }
+
+ @Test
+ public void testExplodingProperties() {
+ Entity task =
+ Entity.newBuilder(taskKey)
+ .set("tags", "fun", "programming", "learn")
+ .set("collaborators", "alice", "bob", "charlie")
+ .set("created", Timestamp.now())
+ .build();
+ assertValidEntity(task);
+ }
+
+ @Test
+ public void testTransactionalUpdate() {
+ List<Key> keys = setUpTransferTests();
+ transferFunds(keys.get(0), keys.get(1), 10);
+ assertSuccessfulTransfer(keys.get(0), keys.get(1));
+ }
+
+ void transferFunds(Key fromKey, Key toKey, long amount) {
+ Transaction txn = datastore.newTransaction();
+ try {
+ List<Entity> entities = txn.fetch(fromKey, toKey);
+ Entity from = entities.get(0);
+ Entity updatedFrom =
+ Entity.newBuilder(from).set("balance", from.getLong("balance") - amount).build();
+ Entity to = entities.get(1);
+ Entity updatedTo =
+ Entity.newBuilder(to).set("balance", to.getLong("balance") + amount).build();
+ txn.put(updatedFrom, updatedTo);
+ txn.commit();
+ } finally {
+ if (txn.isActive()) {
+ txn.rollback();
+ }
+ }
+ }
+
+ @Test
+ public void testTransactionalRetry() {
+ List<Key> keys = setUpTransferTests();
+ Key fromKey = keys.get(0);
+ Key toKey = keys.get(1);
+ int retries = 5;
+ while (true) {
+ try {
+ transferFunds(fromKey, toKey, 10);
+ break;
+ } catch (DatastoreException e) {
+ if (retries == 0) {
+ throw e;
+ }
+ --retries;
+ }
+ }
+ assertSuccessfulTransfer(keys.get(0), keys.get(1));
+ }
+
+ @Test
+ public void testTransactionalGetOrCreate() {
+ Entity task;
+ Transaction txn = datastore.newTransaction();
+ try {
+ task = txn.get(taskKey);
+ if (task == null) {
+ task = Entity.newBuilder(taskKey).build();
+ txn.put(task);
+ txn.commit();
+ }
+ } finally {
+ if (txn.isActive()) {
+ txn.rollback();
+ }
+ }
+ assertEquals(task, datastore.get(taskKey));
+ }
+
+ @Test
+ public void testTransactionalSingleEntityGroupReadOnly() {
+
+ Key taskListKey = datastore.newKeyFactory().setKind("TaskList").newKey("default");
+ Entity taskListEntity = Entity.newBuilder(taskListKey).build();
+ datastore.put(taskListEntity);
+ Entity taskList;
+ QueryResults tasks;
+ Transaction txn =
+ datastore.newTransaction(
+ TransactionOptions.newBuilder().setReadOnly(ReadOnly.newBuilder().build()).build());
+ try {
+ taskList = txn.get(taskListKey);
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.hasAncestor(taskListKey))
+ .build();
+ tasks = txn.run(query);
+ txn.commit();
+ } finally {
+ if (txn.isActive()) {
+ txn.rollback();
+ }
+ }
+ assertEquals(taskListEntity, taskList);
+ assertNotNull(tasks.next());
+ assertFalse(tasks.hasNext());
+ }
+
+ @Test
+ public void testNamespaceRunQuery() {
+ KeyFactory keyFactory = datastore.newKeyFactory().setKind("__namespace__");
+ Key namespaceKey = keyFactory.newKey(OPTIONS.getNamespace());
+ Query<Key> query =
+ Query.newKeyQueryBuilder()
+ .setKind("__namespace__")
+ .setFilter(CompositeFilter.and(PropertyFilter.eq("__key__", namespaceKey)))
+ .build();
+ List<String> namespaces = new ArrayList<>();
+ QueryResults<Key> results = datastore.run(query);
+ while (results.hasNext()) {
+ namespaces.add(results.next().getName());
+ }
+ assertEquals(ImmutableList.of(OPTIONS.getNamespace()), namespaces);
+ }
+
+ @Test
+ public void testKindRunQuery() {
+ Query query = Query.newKeyQueryBuilder().setKind("__kind__").build();
+ List<String> kinds = new ArrayList<>();
+ QueryResults<Key> results = datastore.run(query);
+ while (results.hasNext()) {
+ kinds.add(results.next().getName());
+ }
+ assertEquals(ImmutableList.of(TASK_CONCEPTS), kinds);
+ }
+
+ @Test
+ public void testPropertyRunQuery() {
+ Query query = Query.newKeyQueryBuilder().setKind("__property__").build();
+ QueryResults<Key> keys = datastore.run(query);
+ Map<String, Collection<String>> propertiesByKind = new HashMap<>();
+ while (keys.hasNext()) {
+ Key key = keys.next();
+ String kind = key.getParent().getName();
+ String propertyName = key.getName();
+ Collection<String> properties = propertiesByKind.computeIfAbsent(kind, k -> new HashSet<>());
+ properties.add(propertyName);
+ }
+ Map<String, Collection<String>> expected =
+ ImmutableMap.of(
+ TASK_CONCEPTS,
+ ImmutableSet.of(
+ "done", "category", "completed", "priority", "created", "percent_complete", "tag"));
+ assertEquals(expected, propertiesByKind);
+ }
+
+ @Test
+ public void testPropertyByKindRunQuery() {
+ Key key = datastore.newKeyFactory().setKind("__kind__").newKey(TASK_CONCEPTS);
+ Query<Entity> query =
+ Query.newEntityQueryBuilder()
+ .setKind("__property__")
+ .setFilter(PropertyFilter.hasAncestor(key))
+ .build();
+ QueryResults<Entity> results = datastore.run(query);
+ Map<String, Collection<String>> representationsByProperty = new HashMap<>();
+ while (results.hasNext()) {
+ Entity result = results.next();
+ String propertyName = result.getKey().getName();
+ List<StringValue> representations = result.getList("property_representation");
+ Collection<String> currentRepresentations =
+ representationsByProperty.computeIfAbsent(propertyName, k -> new HashSet<>());
+ for (StringValue value : representations) {
+ currentRepresentations.add(value.get());
+ }
+ }
+ Map<String, Collection<String>> expected =
+ ImmutableMap.<String, Collection<String>>builder()
+ .put("category", Collections.singleton("STRING"))
+ .put("done", Collections.singleton("BOOLEAN"))
+ .put("completed", Collections.singleton("BOOLEAN"))
+ .put("priority", Collections.singleton("INT64"))
+ .put("created", Collections.singleton("INT64"))
+ .put("percent_complete", Collections.singleton("DOUBLE"))
+ .put("tag", Collections.singleton("STRING"))
+ .build();
+ assertEquals(expected, representationsByProperty);
+ }
+
+ @Test
+ public void testPropertyFilteringRunQuery() {
+ Key startKey =
+ datastore
+ .newKeyFactory()
+ .setKind("__property__")
+ .addAncestors(PathElement.of("__kind__", TASK_CONCEPTS))
+ .newKey("priority");
+ Query<Key> query =
+ Query.newKeyQueryBuilder()
+ .setKind("__property__")
+ .setFilter(PropertyFilter.ge("__key__", startKey))
+ .build();
+ Map<String, Collection<String>> propertiesByKind = new HashMap<>();
+ QueryResults<Key> keys = datastore.run(query);
+ while (keys.hasNext()) {
+ Key key = keys.next();
+ String kind = key.getParent().getName();
+ String propertyName = key.getName();
+ Collection<String> properties = propertiesByKind.computeIfAbsent(kind, k -> new HashSet<>());
+ properties.add(propertyName);
+ }
+ Map<String, Collection<String>> expected =
+ ImmutableMap.of(TASK_CONCEPTS, ImmutableSet.of("priority", "tag"));
+ assertEquals(expected, propertiesByKind);
+ }
+
+ @Test
+ public void testEqQuerySorted() {
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.eq("tag", "learn"))
+ .setOrderBy(OrderBy.asc("tag"))
+ .build();
+ assertValidQuery(query);
+ }
+
+ private void setUpQueryTestsRealBackend() {
+ Key taskKey =
+ datastore
+ .newKeyFactory()
+ .setKind(TASK_CONCEPTS)
+ .addAncestors(PathElement.of("TaskList", "default"))
+ .newKey("someTask");
+ datastore.put(
+ Entity.newBuilder(taskKey)
+ .set("category", "Personal")
+ .set("done", false)
+ .set("completed", false)
+ .set("priority", 4)
+ .set("created", includedDate)
+ .set("percent_complete", 10.0)
+ .set(
+ "description",
+ StringValue.newBuilder("Learn Cloud Datastore").setExcludeFromIndexes(true).build())
+ .set("tag", "fun", "l", "programming", "learn")
+ .build());
+ }
+
+ @Test
+ public void testInQuery() {
+ setUpQueryTestsRealBackend();
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.in("tag", ListValue.of("learn", "study")))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testNotEqualsQuery() {
+ setUpQueryTestsRealBackend();
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.neq("category", "Work"))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testNotInQuery() {
+ setUpQueryTestsRealBackend();
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.not_in("category", ListValue.of("Work", "Chores", "School")))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testInQuerySorted() {
+ setUpQueryTestsRealBackend();
+ Query query =
+ Query.newEntityQueryBuilder()
+ .setKind(TASK_CONCEPTS)
+ .setFilter(PropertyFilter.in("tag", ListValue.of("learn", "study")))
+ .setOrderBy(OrderBy.asc("tag"))
+ .build();
+ assertValidQuery(query);
+ }
+
+ @Test
+ public void testStaleReads() throws InterruptedException {
+ setUpQueryTestsRealBackend();
+ // wait for 6 seconds so that we can query with a read time of 5 seconds ago
+ TimeUnit.SECONDS.sleep(6);
+ Key taskKey =
+ datastore
+ .newKeyFactory()
+ .setKind(TASK_CONCEPTS)
+ .addAncestors(PathElement.of("TaskList", "default"))
+ .newKey("someTask");
+
+ Timestamp fiveSecondsAgo =
+ Timestamp.ofTimeSecondsAndNanos(Timestamp.now().getSeconds() - 5L, 0);
+ // Create a readOption with read time fiveSecondsAgo
+ ReadOption readOption = ReadOption.readTime(fiveSecondsAgo);
+ // Use the readOption to fetch the entity
+ Entity entity = datastore.get(taskKey, readOption);
+
+ // Use the readOption to Query kind Task
+ Query query = Query.newEntityQueryBuilder().setKind(TASK_CONCEPTS).setLimit(10).build();
+ assertValidQuery(query);
+ }
+}
diff --git a/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/ITDatastoreTest.java b/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/ITDatastoreTest.java
index f010d8135..3a5ccabac 100644
--- a/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/ITDatastoreTest.java
+++ b/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/ITDatastoreTest.java
@@ -29,6 +29,7 @@
import static org.junit.Assert.fail;
import com.google.cloud.Timestamp;
+import com.google.cloud.Tuple;
import com.google.cloud.datastore.AggregationQuery;
import com.google.cloud.datastore.Batch;
import com.google.cloud.datastore.BooleanValue;
@@ -335,57 +336,82 @@ public void testNewTransactionCommit() {
}
@Test
- public void testTransactionWithRead() {
- Transaction transaction = DATASTORE.newTransaction();
- assertNull(transaction.get(KEY3));
- transaction.add(ENTITY3);
- transaction.commit();
+ public void testTransactionWithRead() throws Exception {
+ StatementExecutor statementExecutor = new StatementExecutor();
+ Transaction baseTransaction = DATASTORE.newTransaction();
+ assertNull(baseTransaction.get(KEY3));
+ baseTransaction.add(ENTITY3);
+ baseTransaction.commit();
assertEquals(ENTITY3, DATASTORE.get(KEY3));
- transaction = DATASTORE.newTransaction();
- assertEquals(ENTITY3, transaction.get(KEY3));
- // update entity3 during the transaction
- DATASTORE.put(Entity.newBuilder(ENTITY2).clear().set("from", "datastore").build());
- transaction.update(Entity.newBuilder(ENTITY2).clear().set("from", "transaction").build());
- try {
- transaction.commit();
- fail("Expecting a failure");
- } catch (DatastoreException expected) {
- assertEquals("ABORTED", expected.getReason());
- }
+ Transaction transaction = DATASTORE.newTransaction();
+ statementExecutor.execute(
+ Tuple.of("T1", () -> assertEquals(ENTITY3, transaction.get(KEY3))),
+ // update entity3 during the transaction; this write will be blocked in case of pessimistic concurrency
+ Tuple.of(
+ "T2",
+ () ->
+ DATASTORE.put(Entity.newBuilder(ENTITY3).clear().set("from", "datastore").build())),
+ Tuple.of(
+ "T1",
+ () ->
+ transaction.update(
+ Entity.newBuilder(ENTITY3).clear().set("from", "transaction").build())),
+ Tuple.of("T1", transaction::commit) // T1 will throw error in case of optimistic concurrency
+ );
+
+ boolean t1AllPassed = statementExecutor.didAllPass("T1");
+ boolean t2AllPassed = statementExecutor.didAllPass("T2");
+ // If two transactions conflict with each other, the database guarantees that only
+ // one can commit successfully at a time. Please refer to the StatementExecutor class for more info.
+ // Using XOR to ensure that exactly one of the transaction groups is successful.
+ boolean onlyOneTransactionIsSuccessful = t1AllPassed ^ t2AllPassed;
+
+ assertThat(onlyOneTransactionIsSuccessful).isTrue();
}
@Test
- public void testTransactionWithQuery() {
+ public void testTransactionWithQuery() throws Exception {
+ StatementExecutor statementExecutor = new StatementExecutor();
Query query =
Query.newEntityQueryBuilder()
.setKind(KIND2)
.setFilter(PropertyFilter.hasAncestor(KEY2))
.setNamespace(NAMESPACE)
.build();
- Transaction transaction = DATASTORE.newTransaction();
- QueryResults results = transaction.run(query);
- assertTrue(results.hasNext());
- assertEquals(ENTITY2, results.next());
- assertFalse(results.hasNext());
- transaction.add(ENTITY3);
- transaction.commit();
+ Transaction baseTransaction = DATASTORE.newTransaction();
+ QueryResults baseResults = baseTransaction.run(query);
+ assertTrue(baseResults.hasNext());
+ assertEquals(ENTITY2, baseResults.next());
+ assertFalse(baseResults.hasNext());
+ baseTransaction.add(ENTITY3);
+ baseTransaction.commit();
assertEquals(ENTITY3, DATASTORE.get(KEY3));
- transaction = DATASTORE.newTransaction();
- results = transaction.run(query);
- assertTrue(results.hasNext());
- assertEquals(ENTITY2, results.next());
- assertFalse(results.hasNext());
- transaction.delete(ENTITY3.getKey());
- // update entity2 during the transaction
- DATASTORE.put(Entity.newBuilder(ENTITY2).clear().build());
- try {
- transaction.commit();
- fail("Expecting a failure");
- } catch (DatastoreException expected) {
- assertEquals("ABORTED", expected.getReason());
- }
+ Transaction transaction = DATASTORE.newTransaction();
+ statementExecutor.execute(
+ Tuple.of(
+ "T1",
+ () -> {
+ QueryResults results = transaction.run(query);
+ assertTrue(results.hasNext());
+ assertEquals(ENTITY2, results.next());
+ assertFalse(results.hasNext());
+ }),
+ Tuple.of("T1", () -> transaction.delete(ENTITY3.getKey())),
+ // update entity2 during the transaction; this write will be blocked in case of pessimistic concurrency
+ Tuple.of("T2", () -> DATASTORE.put(Entity.newBuilder(ENTITY2).clear().build())),
+ Tuple.of("T1", transaction::commit) // T1 will throw error in case of optimistic concurrency
+ );
+
+ boolean t1AllPassed = statementExecutor.didAllPass("T1");
+ boolean t2AllPassed = statementExecutor.didAllPass("T2");
+ // If two transactions conflict with each other, the database guarantees that only
+ // one can commit successfully at a time. Please refer to the StatementExecutor class for more info.
+ // Using XOR to ensure that exactly one of the transaction groups is successful.
+ boolean onlyOneTransactionIsSuccessful = t1AllPassed ^ t2AllPassed;
+
+ assertThat(onlyOneTransactionIsSuccessful).isTrue();
}
@Test
diff --git a/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/StatementExecutor.java b/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/StatementExecutor.java
new file mode 100644
index 000000000..c8ded3d89
--- /dev/null
+++ b/google-cloud-datastore/src/test/java/com/google/cloud/datastore/it/StatementExecutor.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.datastore.it;
+
+import static java.util.concurrent.TimeUnit.SECONDS;
+
+import com.google.cloud.Tuple;
+import com.google.cloud.datastore.DatastoreException;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * An executor class to handle interleaved transactions.
+ *
+ * <p>It executes statements (under multiple transactions) and records their failures under a groupId
+ * provided by users.
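+ *
+ * <p>A minimal usage sketch (illustrative only; {@code txn1}, {@code txn2} and the entities are
+ * assumed to be created by the calling test):
+ *
+ * <pre>{@code
+ * StatementExecutor executor = new StatementExecutor();
+ * executor.execute(
+ *     Tuple.of("T1", () -> txn1.put(entity1)),
+ *     Tuple.of("T2", () -> txn2.put(entity2)),
+ *     Tuple.of("T1", txn1::commit),
+ *     Tuple.of("T2", txn2::commit));
+ * // When the two transactions conflict, exactly one group should end up with no recorded failures.
+ * boolean onlyOne = executor.didAllPass("T1") ^ executor.didAllPass("T2");
+ * }</pre>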
+ */
+class StatementExecutor {
+
+ private final Multimap<String, Exception> failures = ArrayListMultimap.create();
+
+ /**
+ * Executes a list of {@link Statement}s one by one and records their failures under the groupId.
+ * In case of pessimistic concurrency, a statement will be blocked (and cause a delay) until
+ * another transaction that was started earlier is committed. In case of optimistic concurrency,
+ * both transactions can perform their operations simultaneously, but the one that commits first
+ * wins and the other one gets an error on the commit operation, indicating a need to retry.
+ *
+ * @param tuples each {@link Tuple} pairs a groupId of {@link String} type with a
+ * {@link Statement} to execute.
+ */
+ @SafeVarargs
+ final void execute(Tuple<String, Statement>... tuples) throws Exception {
+ ExecutorService executorService = Executors.newSingleThreadExecutor();
+ for (Tuple<String, Statement> tuple : tuples) {
+ String groupId = tuple.x();
+ Statement statement = tuple.y();
+ Future<?> future = executorService.submit(statement::execute);
+ try {
+ // waiting for statement to execute
+ future.get(10, SECONDS);
+ } catch (Exception exception) {
+ future.cancel(true);
+ if (transactionConflict(exception)) {
+ failures.put(groupId, exception);
+ } else {
+ throw exception;
+ }
+ }
+ }
+ executorService.shutdown();
+ }
+
+ boolean didAllPass(String groupId) {
+ return failures.get(groupId).isEmpty();
+ }
+
+ private boolean transactionConflict(Exception exception) {
+ if (exception instanceof TimeoutException) { // timed out because of the pessimistic concurrency delay
+ return true;
+ }
+ return exception instanceof ExecutionException
+ && exception.getCause().getClass() == DatastoreException.class
+ && exception
+ .getMessage()
+ .contains("contention"); // exception raise coz of optimistic concurrency
+ }
+
+ interface Statement {
+ void execute();
+ }
+}
diff --git a/google-cloud-datastore/src/test/resources/index.yaml b/google-cloud-datastore/src/test/resources/index.yaml
new file mode 100644
index 000000000..ff1b08626
--- /dev/null
+++ b/google-cloud-datastore/src/test/resources/index.yaml
@@ -0,0 +1,48 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A one-time index creation is required to run ITDatastoreConceptsTest
+# see https://cloud.google.com/sdk/gcloud/reference/datastore/indexes/create for more details
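+# For example, the indexes below can be deployed once with the gcloud CLI (illustrative command;
+# the file path depends on where this file lives in your checkout):
+#   gcloud datastore indexes create google-cloud-datastore/src/test/resources/index.yaml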
+indexes:
+ - kind: TaskConcepts
+ properties:
+ - name: done
+ - name: priority
+ direction: desc
+ - kind: TaskConcepts
+ properties:
+ - name: tag
+ - name: tag
+ - kind: TaskConcepts
+ properties:
+ - name: priority
+ - name: created
+ - kind: TaskConcepts
+ properties:
+ - name: category
+ - name: priority
+ - kind: TaskConcepts
+ properties:
+ - name: priority
+ direction: desc
+ - name: created
+ - kind: TaskConcepts
+ properties:
+ - name: percent_complete
+ - name: priority
+ - kind: TaskConcepts
+ properties:
+ - name: done
+ - name: priority
+ direction: desc
\ No newline at end of file
diff --git a/grpc-google-cloud-datastore-admin-v1/pom.xml b/grpc-google-cloud-datastore-admin-v1/pom.xml
index 04c30c10b..142659867 100644
--- a/grpc-google-cloud-datastore-admin-v1/pom.xml
+++ b/grpc-google-cloud-datastore-admin-v1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-datastore-admin-v1
- 2.14.6
+ 2.14.7
grpc-google-cloud-datastore-admin-v1
GRPC library for google-cloud-datastore
com.google.cloud
google-cloud-datastore-parent
- 2.14.6
+ 2.14.7
diff --git a/pom.xml b/pom.xml
index 3c0825dca..b0b6d9fbe 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
com.google.cloud
google-cloud-datastore-parent
pom
- 2.14.6
+ 2.14.7
Google Cloud Datastore Parent
https://github.com/googleapis/java-datastore
@@ -143,7 +143,7 @@
github
google-cloud-datastore-parent
https://googleapis.dev/java/google-api-grpc/latest
- 2.18.0
+ 2.19.1
@@ -151,7 +151,7 @@
com.google.cloud
google-cloud-shared-dependencies
- 3.9.0
+ 3.10.1
pom
import
@@ -159,27 +159,27 @@
com.google.api.grpc
proto-google-cloud-datastore-admin-v1
- 2.14.6
+ 2.14.7
com.google.api.grpc
grpc-google-cloud-datastore-admin-v1
- 2.14.6
+ 2.14.7
com.google.cloud
google-cloud-datastore
- 2.14.6
+ 2.14.7
com.google.api.grpc
proto-google-cloud-datastore-v1
- 0.105.6
+ 0.105.7
com.google.cloud.datastore
datastore-v1-proto-client
- 2.14.6
+ 2.14.7
com.google.api.grpc
diff --git a/proto-google-cloud-datastore-admin-v1/pom.xml b/proto-google-cloud-datastore-admin-v1/pom.xml
index ef5aacc99..70bd9bb5f 100644
--- a/proto-google-cloud-datastore-admin-v1/pom.xml
+++ b/proto-google-cloud-datastore-admin-v1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-datastore-admin-v1
- 2.14.6
+ 2.14.7
proto-google-cloud-datastore-admin-v1
Proto library for google-cloud-datastore
com.google.cloud
google-cloud-datastore-parent
- 2.14.6
+ 2.14.7
diff --git a/proto-google-cloud-datastore-v1/pom.xml b/proto-google-cloud-datastore-v1/pom.xml
index e4f046394..e8c1b8b3d 100644
--- a/proto-google-cloud-datastore-v1/pom.xml
+++ b/proto-google-cloud-datastore-v1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-datastore-v1
- 0.105.6
+ 0.105.7
proto-google-cloud-datastore-v1
PROTO library for proto-google-cloud-datastore-v1
com.google.cloud
google-cloud-datastore-parent
- 2.14.6
+ 2.14.7
diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml
index c5147926b..5a68aec6a 100644
--- a/samples/install-without-bom/pom.xml
+++ b/samples/install-without-bom/pom.xml
@@ -29,7 +29,7 @@
com.google.cloud
google-cloud-datastore
- 2.14.5
+ 2.14.6
@@ -53,7 +53,7 @@
org.codehaus.mojo
build-helper-maven-plugin
- 3.3.0
+ 3.4.0
add-snippets-source
diff --git a/samples/native-image-sample/pom.xml b/samples/native-image-sample/pom.xml
index e865b2bb5..4db871c2b 100644
--- a/samples/native-image-sample/pom.xml
+++ b/samples/native-image-sample/pom.xml
@@ -28,7 +28,7 @@
com.google.cloud
libraries-bom
- 26.14.0
+ 26.15.0
pom
import
@@ -86,7 +86,7 @@
org.graalvm.buildtools
junit-platform-native
- 0.9.21
+ 0.9.22
test
@@ -107,7 +107,7 @@
org.graalvm.buildtools
native-maven-plugin
- 0.9.21
+ 0.9.22
true
com.example.datastore.NativeImageDatastoreSample
diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml
index a71ad69ab..6421d6f95 100644
--- a/samples/snapshot/pom.xml
+++ b/samples/snapshot/pom.xml
@@ -28,7 +28,7 @@
com.google.cloud
google-cloud-datastore
- 2.14.5
+ 2.14.6
@@ -52,7 +52,7 @@
org.codehaus.mojo
build-helper-maven-plugin
- 3.3.0
+ 3.4.0
add-snippets-source
diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml
index 315ff268a..cde13ac88 100644
--- a/samples/snippets/pom.xml
+++ b/samples/snippets/pom.xml
@@ -30,7 +30,7 @@
com.google.cloud
libraries-bom
- 26.14.0
+ 26.15.0
pom
import
diff --git a/versions.txt b/versions.txt
index 063513c96..bd1c2fa3e 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,9 +1,9 @@
# Format:
# module:released-version:current-version
-google-cloud-datastore:2.14.6:2.14.6
-google-cloud-datastore-bom:2.14.6:2.14.6
-proto-google-cloud-datastore-v1:0.105.6:0.105.6
-datastore-v1-proto-client:2.14.6:2.14.6
-proto-google-cloud-datastore-admin-v1:2.14.6:2.14.6
-grpc-google-cloud-datastore-admin-v1:2.14.6:2.14.6
+google-cloud-datastore:2.14.7:2.14.7
+google-cloud-datastore-bom:2.14.7:2.14.7
+proto-google-cloud-datastore-v1:0.105.7:0.105.7
+datastore-v1-proto-client:2.14.7:2.14.7
+proto-google-cloud-datastore-admin-v1:2.14.7:2.14.7
+grpc-google-cloud-datastore-admin-v1:2.14.7:2.14.7