diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 2c3fb6e13..56c8253ff 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-d3853c8dee5806d04da2ae8910f273ffb35719a5
\ No newline at end of file
+a7a9dc025bb80303e676bf3708942c6aa06689f1
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 75a37182c..851a4567d 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -174,6 +174,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccount
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java linguist-generated=true
@@ -184,6 +185,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegiste
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSystemSchemasRequest.java linguist-generated=true
@@ -480,6 +482,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluste
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java linguist-generated=true
@@ -1220,6 +1226,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzu
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListType.java linguist-generated=true
@@ -1505,6 +1512,52 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesServi
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Widget.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetOptions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WidgetPosition.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResult.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumn.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumn.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PipelineType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclItem.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AclPermission.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/AzureKeyVaultSecretScopeMetadata.java linguist-generated=true
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index 7a93fae99..72ab50c52 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -55,6 +55,8 @@
import com.databricks.sdk.service.compute.LibrariesService;
import com.databricks.sdk.service.compute.PolicyFamiliesAPI;
import com.databricks.sdk.service.compute.PolicyFamiliesService;
+import com.databricks.sdk.service.dashboards.LakeviewAPI;
+import com.databricks.sdk.service.dashboards.LakeviewService;
import com.databricks.sdk.service.files.DbfsService;
import com.databricks.sdk.service.files.FilesAPI;
import com.databricks.sdk.service.files.FilesService;
@@ -124,6 +126,10 @@
import com.databricks.sdk.service.sql.StatementExecutionService;
import com.databricks.sdk.service.sql.WarehousesAPI;
import com.databricks.sdk.service.sql.WarehousesService;
+import com.databricks.sdk.service.vectorsearch.VectorSearchEndpointsAPI;
+import com.databricks.sdk.service.vectorsearch.VectorSearchEndpointsService;
+import com.databricks.sdk.service.vectorsearch.VectorSearchIndexesAPI;
+import com.databricks.sdk.service.vectorsearch.VectorSearchIndexesService;
import com.databricks.sdk.service.workspace.GitCredentialsAPI;
import com.databricks.sdk.service.workspace.GitCredentialsService;
import com.databricks.sdk.service.workspace.ReposAPI;
@@ -168,6 +174,7 @@ public class WorkspaceClient {
private InstanceProfilesAPI instanceProfilesAPI;
private IpAccessListsAPI ipAccessListsAPI;
private JobsAPI jobsAPI;
+ private LakeviewAPI lakeviewAPI;
private LibrariesAPI librariesAPI;
private MetastoresAPI metastoresAPI;
private ModelRegistryAPI modelRegistryAPI;
@@ -197,6 +204,8 @@ public class WorkspaceClient {
private TokenManagementAPI tokenManagementAPI;
private TokensAPI tokensAPI;
private UsersAPI usersAPI;
+ private VectorSearchEndpointsAPI vectorSearchEndpointsAPI;
+ private VectorSearchIndexesAPI vectorSearchIndexesAPI;
private VolumesAPI volumesAPI;
private WarehousesAPI warehousesAPI;
private WorkspaceAPI workspaceAPI;
@@ -240,6 +249,7 @@ public WorkspaceClient(DatabricksConfig config) {
instanceProfilesAPI = new InstanceProfilesAPI(apiClient);
ipAccessListsAPI = new IpAccessListsAPI(apiClient);
jobsAPI = new JobsAPI(apiClient);
+ lakeviewAPI = new LakeviewAPI(apiClient);
librariesAPI = new LibrariesAPI(apiClient);
metastoresAPI = new MetastoresAPI(apiClient);
modelRegistryAPI = new ModelRegistryAPI(apiClient);
@@ -269,6 +279,8 @@ public WorkspaceClient(DatabricksConfig config) {
tokenManagementAPI = new TokenManagementAPI(apiClient);
tokensAPI = new TokensAPI(apiClient);
usersAPI = new UsersAPI(apiClient);
+ vectorSearchEndpointsAPI = new VectorSearchEndpointsAPI(apiClient);
+ vectorSearchIndexesAPI = new VectorSearchIndexesAPI(apiClient);
volumesAPI = new VolumesAPI(apiClient);
warehousesAPI = new WarehousesAPI(apiClient);
workspaceAPI = new WorkspaceAPI(apiClient);
@@ -681,6 +693,14 @@ public JobsAPI jobs() {
return jobsAPI;
}
+ /**
+ * These APIs provide specific management operations for Lakeview dashboards. Generic resource
+ * management can be done with Workspace API (import, export, get-status, list, delete).
+ */
+ public LakeviewAPI lakeview() {
+ return lakeviewAPI;
+ }
+
/**
* The Libraries API allows you to install and uninstall libraries and get the status of libraries
* on a cluster.
@@ -1206,6 +1226,25 @@ public UsersAPI users() {
return usersAPI;
}
+ /** **Endpoint**: Represents the compute resources to host vector search indexes. */
+ public VectorSearchEndpointsAPI vectorSearchEndpoints() {
+ return vectorSearchEndpointsAPI;
+ }
+
+ /**
+ * **Index**: An efficient representation of your embedding vectors that supports real-time and
+ * efficient approximate nearest neighbor (ANN) search queries.
+ *
+ *
+   * <p>There are 2 types of Vector Search indexes: * **Delta Sync Index**: An index that
+ * automatically syncs with a source Delta Table, automatically and incrementally updating the
+ * index as the underlying data in the Delta Table changes. * **Direct Vector Access Index**: An
+ * index that supports direct read and write of vectors and metadata through our REST and SDK
+ * APIs. With this model, the user manages index updates.
+ */
+ public VectorSearchIndexesAPI vectorSearchIndexes() {
+ return vectorSearchIndexesAPI;
+ }
+
/**
* Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and
* processing files. Use cases include running machine learning on unstructured data such as
@@ -1441,6 +1480,12 @@ public WorkspaceClient withJobsImpl(JobsService jobs) {
return this;
}
+ /** Replace LakeviewAPI implementation with mock */
+ public WorkspaceClient withLakeviewImpl(LakeviewService lakeview) {
+ lakeviewAPI = new LakeviewAPI(lakeview);
+ return this;
+ }
+
/** Replace LibrariesAPI implementation with mock */
public WorkspaceClient withLibrariesImpl(LibrariesService libraries) {
librariesAPI = new LibrariesAPI(libraries);
@@ -1617,6 +1662,20 @@ public WorkspaceClient withUsersImpl(UsersService users) {
return this;
}
+ /** Replace VectorSearchEndpointsAPI implementation with mock */
+ public WorkspaceClient withVectorSearchEndpointsImpl(
+ VectorSearchEndpointsService vectorSearchEndpoints) {
+ vectorSearchEndpointsAPI = new VectorSearchEndpointsAPI(vectorSearchEndpoints);
+ return this;
+ }
+
+ /** Replace VectorSearchIndexesAPI implementation with mock */
+ public WorkspaceClient withVectorSearchIndexesImpl(
+ VectorSearchIndexesService vectorSearchIndexes) {
+ vectorSearchIndexesAPI = new VectorSearchIndexesAPI(vectorSearchIndexes);
+ return this;
+ }
+
/** Replace VolumesAPI implementation with mock */
public WorkspaceClient withVolumesImpl(VolumesService volumes) {
volumesAPI = new VolumesAPI(volumes);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
index 3663b8dcf..d9759ac31 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -83,11 +84,22 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) {
*
*
   * <p>Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore.
* The caller must be a metastore admin, the owner of the external location, or a user that has
- * some privilege on the external location. There is no guarantee of a specific ordering of the
- * elements in the array.
+ * some privilege on the external location. For unpaginated request, there is no guarantee of a
+ * specific ordering of the elements in the array. For paginated request, elements are ordered by
+ * their name.
*/
- public Iterable<ExternalLocationInfo> list() {
- return impl.list().getExternalLocations();
+ public Iterable<ExternalLocationInfo> list(ListExternalLocationsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::list,
+ ListExternalLocationsResponse::getExternalLocations,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
}
public ExternalLocationInfo update(String name) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java
index f4a68dea1..d685d8573 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java
@@ -41,11 +41,11 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) {
}
@Override
- public ListExternalLocationsResponse list() {
+ public ListExternalLocationsResponse list(ListExternalLocationsRequest request) {
String path = "/api/2.1/unity-catalog/external-locations";
Map<String, String> headers = new HashMap<>();
headers.put("Accept", "application/json");
- return apiClient.GET(path, ListExternalLocationsResponse.class, headers);
+ return apiClient.GET(path, request, ListExternalLocationsResponse.class, headers);
}
@Override
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
index 6be64bc7a..0cefbac94 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java
@@ -51,10 +51,11 @@ public interface ExternalLocationsService {
*
*
   * <p>Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore.
* The caller must be a metastore admin, the owner of the external location, or a user that has
- * some privilege on the external location. There is no guarantee of a specific ordering of the
- * elements in the array.
+ * some privilege on the external location. For unpaginated request, there is no guarantee of a
+ * specific ordering of the elements in the array. For paginated request, elements are ordered by
+ * their name.
*/
- ListExternalLocationsResponse list();
+ ListExternalLocationsResponse list(ListExternalLocationsRequest listExternalLocationsRequest);
/**
* Update an external location.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
index 31aab9536..e5e01a85e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -93,11 +94,21 @@ public Iterable<FunctionInfo> list(String catalogName, String schemaName) {
* admin, all functions are returned in the output list. Otherwise, the user must have the
* **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and
* the output list contains only functions for which either the user has the **EXECUTE** privilege
- * or the user is the owner. There is no guarantee of a specific ordering of the elements in the
- * array.
+ * or the user is the owner. For unpaginated request, there is no guarantee of a specific ordering
+ * of the elements in the array. For paginated request, elements are ordered by their name.
*/
public Iterable<FunctionInfo> list(ListFunctionsRequest request) {
- return impl.list(request).getFunctions();
+ return new Paginator<>(
+ request,
+ impl::list,
+ ListFunctionsResponse::getFunctions,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
}
public FunctionInfo update(String name) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
index c47075d69..f891bfd19 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java
@@ -58,8 +58,8 @@ public interface FunctionsService {
* admin, all functions are returned in the output list. Otherwise, the user must have the
* **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and
* the output list contains only functions for which either the user has the **EXECUTE** privilege
- * or the user is the owner. There is no guarantee of a specific ordering of the elements in the
- * array.
+ * or the user is the owner. For unpaginated request, there is no guarantee of a specific ordering
+ * of the elements in the array. For paginated request, elements are ordered by their name.
*/
ListFunctionsResponse list(ListFunctionsRequest listFunctionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
new file mode 100755
index 000000000..74ef5cbf3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java
@@ -0,0 +1,65 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** List external locations */
+@Generated
+public class ListExternalLocationsRequest {
+ /**
+ * Maximum number of external locations to return. If not set, all the external locations are
+ * returned (not recommended). - when set to a value greater than 0, the page length is the
+ * minimum of this value and a server configured value; - when set to 0, the page length is set to
+ * a server configured value (recommended); - when set to a value less than 0, an invalid
+ * parameter error is returned;
+ */
+ @QueryParam("max_results")
+ private Long maxResults;
+
+ /** Opaque pagination token to go to next page based on previous query. */
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListExternalLocationsRequest setMaxResults(Long maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public Long getMaxResults() {
+ return maxResults;
+ }
+
+ public ListExternalLocationsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListExternalLocationsRequest that = (ListExternalLocationsRequest) o;
+ return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(maxResults, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListExternalLocationsRequest.class)
+ .add("maxResults", maxResults)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java
index 4661229e0..f43f138f4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java
@@ -14,6 +14,13 @@ public class ListExternalLocationsResponse {
@JsonProperty("external_locations")
private Collection<ExternalLocationInfo> externalLocations;
+ /**
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * __page_token__ should be set to this value for the next request (for the next page of results).
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
public ListExternalLocationsResponse setExternalLocations(
Collection<ExternalLocationInfo> externalLocations) {
this.externalLocations = externalLocations;
@@ -24,23 +31,34 @@ public Collection<ExternalLocationInfo> getExternalLocations() {
return externalLocations;
}
+ public ListExternalLocationsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListExternalLocationsResponse that = (ListExternalLocationsResponse) o;
- return Objects.equals(externalLocations, that.externalLocations);
+ return Objects.equals(externalLocations, that.externalLocations)
+ && Objects.equals(nextPageToken, that.nextPageToken);
}
@Override
public int hashCode() {
- return Objects.hash(externalLocations);
+ return Objects.hash(externalLocations, nextPageToken);
}
@Override
public String toString() {
return new ToStringer(ListExternalLocationsResponse.class)
.add("externalLocations", externalLocations)
+ .add("nextPageToken", nextPageToken)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java
index fbc996c2f..7b430e1a0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java
@@ -14,6 +14,20 @@ public class ListFunctionsRequest {
@QueryParam("catalog_name")
private String catalogName;
+ /**
+ * Maximum number of functions to return. If not set, all the functions are returned (not
+ * recommended). - when set to a value greater than 0, the page length is the minimum of this
+ * value and a server configured value; - when set to 0, the page length is set to a server
+ * configured value (recommended); - when set to a value less than 0, an invalid parameter error
+ * is returned;
+ */
+ @QueryParam("max_results")
+ private Long maxResults;
+
+ /** Opaque pagination token to go to next page based on previous query. */
+ @QueryParam("page_token")
+ private String pageToken;
+
/** Parent schema of functions. */
@QueryParam("schema_name")
private String schemaName;
@@ -27,6 +41,24 @@ public String getCatalogName() {
return catalogName;
}
+ public ListFunctionsRequest setMaxResults(Long maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public Long getMaxResults() {
+ return maxResults;
+ }
+
+ public ListFunctionsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
public ListFunctionsRequest setSchemaName(String schemaName) {
this.schemaName = schemaName;
return this;
@@ -42,18 +74,22 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListFunctionsRequest that = (ListFunctionsRequest) o;
return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(maxResults, that.maxResults)
+ && Objects.equals(pageToken, that.pageToken)
&& Objects.equals(schemaName, that.schemaName);
}
@Override
public int hashCode() {
- return Objects.hash(catalogName, schemaName);
+ return Objects.hash(catalogName, maxResults, pageToken, schemaName);
}
@Override
public String toString() {
return new ToStringer(ListFunctionsRequest.class)
.add("catalogName", catalogName)
+ .add("maxResults", maxResults)
+ .add("pageToken", pageToken)
.add("schemaName", schemaName)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java
index 01aac6b55..b88b18e14 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java
@@ -14,6 +14,13 @@ public class ListFunctionsResponse {
@JsonProperty("functions")
private Collection<FunctionInfo> functions;
+ /**
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * __page_token__ should be set to this value for the next request (for the next page of results).
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
public ListFunctionsResponse setFunctions(Collection<FunctionInfo> functions) {
this.functions = functions;
return this;
@@ -23,21 +30,34 @@ public Collection<FunctionInfo> getFunctions() {
return functions;
}
+ public ListFunctionsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListFunctionsResponse that = (ListFunctionsResponse) o;
- return Objects.equals(functions, that.functions);
+ return Objects.equals(functions, that.functions)
+ && Objects.equals(nextPageToken, that.nextPageToken);
}
@Override
public int hashCode() {
- return Objects.hash(functions);
+ return Objects.hash(functions, nextPageToken);
}
@Override
public String toString() {
- return new ToStringer(ListFunctionsResponse.class).add("functions", functions).toString();
+ return new ToStringer(ListFunctionsResponse.class)
+ .add("functions", functions)
+ .add("nextPageToken", nextPageToken)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java
index 8535a0b74..74945e49d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java
@@ -13,11 +13,17 @@ public class ListModelVersionsRequest {
/** The full three-level name of the registered model under which to list model versions */
private String fullName;
- /** Max number of model versions to return */
+ /**
+ * Maximum number of model versions to return. If not set, the page length is set to a server
+ * configured value (100, as of 1/3/2024). - when set to a value greater than 0, the page length
+ * is the minimum of this value and a server configured value(1000, as of 1/3/2024); - when set to
+ * 0, the page length is set to a server configured value (100, as of 1/3/2024) (recommended); -
+ * when set to a value less than 0, an invalid parameter error is returned;
+ */
@QueryParam("max_results")
private Long maxResults;
- /** Opaque token to send for the next page of results (pagination). */
+ /** Opaque pagination token to go to next page based on previous query. */
@QueryParam("page_token")
private String pageToken;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java
index 4088f2e50..33959df88 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java
@@ -14,7 +14,10 @@ public class ListModelVersionsResponse {
@JsonProperty("model_versions")
private Collection<ModelVersionInfo> modelVersions;
- /** Token to retrieve the next page of results */
+ /**
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * __page_token__ should be set to this value for the next request (for the next page of results).
+ */
@JsonProperty("next_page_token")
private String nextPageToken;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java
index a8bc3408b..acae1f8b1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java
@@ -14,6 +14,20 @@ public class ListSchemasRequest {
@QueryParam("catalog_name")
private String catalogName;
+ /**
+ * Maximum number of schemas to return. If not set, all the schemas are returned (not
+ * recommended). - when set to a value greater than 0, the page length is the minimum of this
+ * value and a server configured value; - when set to 0, the page length is set to a server
+ * configured value (recommended); - when set to a value less than 0, an invalid parameter error
+ * is returned;
+ */
+ @QueryParam("max_results")
+ private Long maxResults;
+
+ /** Opaque pagination token to go to next page based on previous query. */
+ @QueryParam("page_token")
+ private String pageToken;
+
public ListSchemasRequest setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
@@ -23,21 +37,45 @@ public String getCatalogName() {
return catalogName;
}
+ public ListSchemasRequest setMaxResults(Long maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public Long getMaxResults() {
+ return maxResults;
+ }
+
+ public ListSchemasRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListSchemasRequest that = (ListSchemasRequest) o;
- return Objects.equals(catalogName, that.catalogName);
+ return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(maxResults, that.maxResults)
+ && Objects.equals(pageToken, that.pageToken);
}
@Override
public int hashCode() {
- return Objects.hash(catalogName);
+ return Objects.hash(catalogName, maxResults, pageToken);
}
@Override
public String toString() {
- return new ToStringer(ListSchemasRequest.class).add("catalogName", catalogName).toString();
+ return new ToStringer(ListSchemasRequest.class)
+ .add("catalogName", catalogName)
+ .add("maxResults", maxResults)
+ .add("pageToken", pageToken)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java
index 662ea69cd..23021dcba 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasResponse.java
@@ -10,10 +10,26 @@
@Generated
public class ListSchemasResponse {
+ /**
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * __page_token__ should be set to this value for the next request (for the next page of results).
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
/** An array of schema information objects. */
@JsonProperty("schemas")
private Collection<SchemaInfo> schemas;
+ public ListSchemasResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
public ListSchemasResponse setSchemas(Collection<SchemaInfo> schemas) {
this.schemas = schemas;
return this;
@@ -28,16 +44,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListSchemasResponse that = (ListSchemasResponse) o;
- return Objects.equals(schemas, that.schemas);
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(schemas, that.schemas);
}
@Override
public int hashCode() {
- return Objects.hash(schemas);
+ return Objects.hash(nextPageToken, schemas);
}
@Override
public String toString() {
- return new ToStringer(ListSchemasResponse.class).add("schemas", schemas).toString();
+ return new ToStringer(ListSchemasResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("schemas", schemas)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java
new file mode 100755
index 000000000..a46db8b70
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsRequest.java
@@ -0,0 +1,65 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** List credentials */
+@Generated
+public class ListStorageCredentialsRequest {
+ /**
+ * Maximum number of storage credentials to return. If not set, all the storage credentials are
+ * returned (not recommended). - when set to a value greater than 0, the page length is the
+ * minimum of this value and a server configured value; - when set to 0, the page length is set to
+ * a server configured value (recommended); - when set to a value less than 0, an invalid
+ * parameter error is returned;
+ */
+ @QueryParam("max_results")
+ private Long maxResults;
+
+ /** Opaque pagination token to go to next page based on previous query. */
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListStorageCredentialsRequest setMaxResults(Long maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public Long getMaxResults() {
+ return maxResults;
+ }
+
+ public ListStorageCredentialsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListStorageCredentialsRequest that = (ListStorageCredentialsRequest) o;
+ return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(maxResults, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListStorageCredentialsRequest.class)
+ .add("maxResults", maxResults)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java
index 5523b5daf..25ef95015 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListStorageCredentialsResponse.java
@@ -10,10 +10,26 @@
@Generated
public class ListStorageCredentialsResponse {
+ /**
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * __page_token__ should be set to this value for the next request (for the next page of results).
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
/** */
@JsonProperty("storage_credentials")
private Collection<StorageCredentialInfo> storageCredentials;
+ public ListStorageCredentialsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
public ListStorageCredentialsResponse setStorageCredentials(
Collection<StorageCredentialInfo> storageCredentials) {
this.storageCredentials = storageCredentials;
@@ -29,17 +45,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListStorageCredentialsResponse that = (ListStorageCredentialsResponse) o;
- return Objects.equals(storageCredentials, that.storageCredentials);
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(storageCredentials, that.storageCredentials);
}
@Override
public int hashCode() {
- return Objects.hash(storageCredentials);
+ return Objects.hash(nextPageToken, storageCredentials);
}
@Override
public String toString() {
return new ToStringer(ListStorageCredentialsResponse.class)
+ .add("nextPageToken", nextPageToken)
.add("storageCredentials", storageCredentials)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java
index fef187a0e..a5ed578a8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java
@@ -14,11 +14,16 @@ public class ListSummariesRequest {
@QueryParam("catalog_name")
private String catalogName;
- /** Maximum number of tables to return (page length). Defaults to 10000. */
+ /**
+ * Maximum number of summaries for tables to return. If not set, the page length is set to 10000.
+ * - when set to a value less than or equal 0, an invalid parameter error is returned; - when set
+ * to a value greater than 0 and less than or equal 10000, the page length is set to that value; -
+ * when set to a value greater than 10000, an invalid parameter error is returned;
+ */
@QueryParam("max_results")
private Long maxResults;
- /** Opaque token to send for the next page of results (pagination). */
+ /** Opaque pagination token to go to next page based on previous query. */
@QueryParam("page_token")
private String pageToken;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponse.java
index 4b51ab013..bfe514e7b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTableSummariesResponse.java
@@ -10,7 +10,10 @@
@Generated
public class ListTableSummariesResponse {
- /** Opaque token for pagination. Omitted if there are no more results. */
+ /**
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * __page_token__ should be set to this value for the next request (for the next page of results).
+ */
@JsonProperty("next_page_token")
private String nextPageToken;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
index 98f5fe34a..7ec16a4bf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
@@ -19,16 +19,22 @@ public class ListTablesRequest {
private Boolean includeDeltaMetadata;
/**
- * Maximum number of tables to return (page length). If not set, all accessible tables in the
- * schema are returned. If set to:
- *
- * <p>* greater than 0, page length is the minimum of this value and a server configured value. *
- * equal to 0, page length is set to a server configured value. * lesser than 0, invalid parameter
- * error.
+ * Maximum number of tables to return. If not set, all the tables are returned (not recommended).
+ * - when set to a value greater than 0, the page length is the minimum of this value and a server
+ * configured value; - when set to 0, the page length is set to a server configured value
+ * (recommended); - when set to a value less than 0, an invalid parameter error is returned;
*/
@QueryParam("max_results")
private Long maxResults;
+ /** Whether to omit the columns of the table from the response or not. */
+ @QueryParam("omit_columns")
+ private Boolean omitColumns;
+
+ /** Whether to omit the properties of the table from the response or not. */
+ @QueryParam("omit_properties")
+ private Boolean omitProperties;
+
/** Opaque token to send for the next page of results (pagination). */
@QueryParam("page_token")
private String pageToken;
@@ -64,6 +70,24 @@ public Long getMaxResults() {
return maxResults;
}
+ public ListTablesRequest setOmitColumns(Boolean omitColumns) {
+ this.omitColumns = omitColumns;
+ return this;
+ }
+
+ public Boolean getOmitColumns() {
+ return omitColumns;
+ }
+
+ public ListTablesRequest setOmitProperties(Boolean omitProperties) {
+ this.omitProperties = omitProperties;
+ return this;
+ }
+
+ public Boolean getOmitProperties() {
+ return omitProperties;
+ }
+
public ListTablesRequest setPageToken(String pageToken) {
this.pageToken = pageToken;
return this;
@@ -90,13 +114,22 @@ public boolean equals(Object o) {
return Objects.equals(catalogName, that.catalogName)
&& Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata)
&& Objects.equals(maxResults, that.maxResults)
+ && Objects.equals(omitColumns, that.omitColumns)
+ && Objects.equals(omitProperties, that.omitProperties)
&& Objects.equals(pageToken, that.pageToken)
&& Objects.equals(schemaName, that.schemaName);
}
@Override
public int hashCode() {
- return Objects.hash(catalogName, includeDeltaMetadata, maxResults, pageToken, schemaName);
+ return Objects.hash(
+ catalogName,
+ includeDeltaMetadata,
+ maxResults,
+ omitColumns,
+ omitProperties,
+ pageToken,
+ schemaName);
}
@Override
@@ -105,6 +138,8 @@ public String toString() {
.add("catalogName", catalogName)
.add("includeDeltaMetadata", includeDeltaMetadata)
.add("maxResults", maxResults)
+ .add("omitColumns", omitColumns)
+ .add("omitProperties", omitProperties)
.add("pageToken", pageToken)
.add("schemaName", schemaName)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java
index 270f323b0..429103c82 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesResponse.java
@@ -11,8 +11,8 @@
@Generated
public class ListTablesResponse {
/**
- * Opaque token for pagination. Omitted if there are no more results. page_token should be set to
- * this value for fetching the next page.
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * __page_token__ should be set to this value for the next request (for the next page of results).
*/
@JsonProperty("next_page_token")
private String nextPageToken;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
index f531e8926..64eb74554 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -80,11 +81,22 @@ public Iterable<SchemaInfo> list(String catalogName) {
* <p>Gets an array of schemas for a catalog in the metastore. If the caller is the metastore
* admin or the owner of the parent catalog, all schemas for the catalog will be retrieved.
* Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA**
- * privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in
- * the array.
+ * privilege) will be retrieved. For unpaginated request, there is no guarantee of a specific
+ * ordering of the elements in the array. For paginated request, elements are ordered by their
+ * name.
*/
public Iterable<SchemaInfo> list(ListSchemasRequest request) {
- return impl.list(request).getSchemas();
+ return new Paginator<>(
+ request,
+ impl::list,
+ ListSchemasResponse::getSchemas,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
}
public SchemaInfo update(String fullName) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
index e18efa0d8..68f384c70 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java
@@ -45,8 +45,9 @@ public interface SchemasService {
* <p>Gets an array of schemas for a catalog in the metastore. If the caller is the metastore
* admin or the owner of the parent catalog, all schemas for the catalog will be retrieved.
* Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA**
- * privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in
- * the array.
+ * privilege) will be retrieved. For unpaginated request, there is no guarantee of a specific
+ * ordering of the elements in the array. For paginated request, elements are ordered by their
+ * name.
*/
ListSchemasResponse list(ListSchemasRequest listSchemasRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
index 268bf4ab0..1c20bfd60 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -81,11 +82,22 @@ public StorageCredentialInfo get(GetStorageCredentialRequest request) {
*
* <p>Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is
* limited to only those storage credentials the caller has permission to access. If the caller is
- * a metastore admin, all storage credentials will be retrieved. There is no guarantee of a
- * specific ordering of the elements in the array.
+ * a metastore admin, retrieval of credentials is unrestricted. For unpaginated request, there is
+ * no guarantee of a specific ordering of the elements in the array. For paginated request,
+ * elements are ordered by their name.
*/
- public Iterable<StorageCredentialInfo> list() {
- return impl.list().getStorageCredentials();
+ public Iterable<StorageCredentialInfo> list(ListStorageCredentialsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::list,
+ ListStorageCredentialsResponse::getStorageCredentials,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
}
public StorageCredentialInfo update(String name) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java
index 10a3743b1..080a9d15f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java
@@ -41,11 +41,11 @@ public StorageCredentialInfo get(GetStorageCredentialRequest request) {
}
@Override
- public ListStorageCredentialsResponse list() {
+ public ListStorageCredentialsResponse list(ListStorageCredentialsRequest request) {
String path = "/api/2.1/unity-catalog/storage-credentials";
Map<String, String> headers = new HashMap<>();
headers.put("Accept", "application/json");
- return apiClient.GET(path, ListStorageCredentialsResponse.class, headers);
+ return apiClient.GET(path, request, ListStorageCredentialsResponse.class, headers);
}
@Override
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
index 39f43a059..d6de7f68f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
@@ -50,10 +50,11 @@ public interface StorageCredentialsService {
*
* <p>Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is
* limited to only those storage credentials the caller has permission to access. If the caller is
- * a metastore admin, all storage credentials will be retrieved. There is no guarantee of a
- * specific ordering of the elements in the array.
+ * a metastore admin, retrieval of credentials is unrestricted. For unpaginated request, there is
+ * no guarantee of a specific ordering of the elements in the array. For paginated request,
+ * elements are ordered by their name.
*/
- ListStorageCredentialsResponse list();
+ ListStorageCredentialsResponse list(ListStorageCredentialsRequest listStorageCredentialsRequest);
/**
* Update a credential.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
index 06ff04f9b..3b65c8ab1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
@@ -102,11 +102,11 @@ public Iterable<TableSummary> listSummaries(String catalogName) {
* <p>Gets an array of summaries for tables for a schema and catalog within the metastore. The
* table summaries returned are either:
*
- * <p>* summaries for all tables (within the current metastore and parent catalog and schema),
- * when the user is a metastore admin, or: * summaries for all tables and schemas (within the
- * current metastore and parent catalog) for which the user has ownership or the **SELECT**
- * privilege on the table and ownership or **USE_SCHEMA** privilege on the schema, provided that
- * the user also has ownership or the **USE_CATALOG** privilege on the parent catalog.
+ * <p>* summaries for tables (within the current metastore and parent catalog and schema), when
+ * the user is a metastore admin, or: * summaries for tables and schemas (within the current
+ * metastore and parent catalog) for which the user has ownership or the **SELECT** privilege on
+ * the table and ownership or **USE_SCHEMA** privilege on the schema, provided that the user also
+ * has ownership or the **USE_CATALOG** privilege on the parent catalog.
*
* <p>There is no guarantee of a specific ordering of the elements in the array.
*/
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
index 0585368ba..c5c016f49 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java
@@ -56,11 +56,11 @@ public interface TablesService {
* <p>Gets an array of summaries for tables for a schema and catalog within the metastore. The
* table summaries returned are either:
*
- * <p>* summaries for all tables (within the current metastore and parent catalog and schema),
- * when the user is a metastore admin, or: * summaries for all tables and schemas (within the
- * current metastore and parent catalog) for which the user has ownership or the **SELECT**
- * privilege on the table and ownership or **USE_SCHEMA** privilege on the schema, provided that
- * the user also has ownership or the **USE_CATALOG** privilege on the parent catalog.
+ * <p>* summaries for tables (within the current metastore and parent catalog and schema), when
+ * the user is a metastore admin, or: * summaries for tables and schemas (within the current
+ * metastore and parent catalog) for which the user has ownership or the **SELECT** privilege on
+ * the table and ownership or **USE_SCHEMA** privilege on the schema, provided that the user also
+ * has ownership or the **USE_CATALOG** privilege on the parent catalog.
*
* <p>There is no guarantee of a specific ordering of the elements in the array.
*/
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
new file mode 100755
index 000000000..bc55fbe21
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * These APIs provide specific management operations for Lakeview dashboards. Generic resource
+ * management can be done with Workspace API (import, export, get-status, list, delete).
+ */
+@Generated
+public class LakeviewAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(LakeviewAPI.class);
+
+ private final LakeviewService impl;
+
+ /** Regular-use constructor */
+ public LakeviewAPI(ApiClient apiClient) {
+ impl = new LakeviewImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public LakeviewAPI(LakeviewService mock) {
+ impl = mock;
+ }
+
+ public void publish(String dashboardId) {
+ publish(new PublishRequest().setDashboardId(dashboardId));
+ }
+
+ /**
+ * Publish dashboard.
+ *
+ * <p>Publish the current draft dashboard.
+ */
+ public void publish(PublishRequest request) {
+ impl.publish(request);
+ }
+
+ public LakeviewService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
new file mode 100755
index 000000000..0dbc3d738
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
@@ -0,0 +1,27 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of Lakeview */
+@Generated
+class LakeviewImpl implements LakeviewService {
+ private final ApiClient apiClient;
+
+ public LakeviewImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public void publish(PublishRequest request) {
+ String path =
+ String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId());
+ Map<String, String> headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ headers.put("Content-Type", "application/json");
+ apiClient.POST(path, request, Void.class, headers);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java
new file mode 100755
index 000000000..c283a65ae
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java
@@ -0,0 +1,22 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * These APIs provide specific management operations for Lakeview dashboards. Generic resource
+ * management can be done with Workspace API (import, export, get-status, list, delete).
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface LakeviewService {
+ /**
+ * Publish dashboard.
+ *
+ * <p>Publish the current draft dashboard.
+ */
+ void publish(PublishRequest publishRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java
new file mode 100755
index 000000000..fc4167089
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PublishRequest {
+ /** UUID identifying the dashboard to be published. */
+ private String dashboardId;
+
+ /**
+ * Flag to indicate if the publisher's credentials should be embedded in the published dashboard.
+ * These embedded credentials will be used to execute the published dashboard's queries.
+ */
+ @JsonProperty("embed_credentials")
+ private Boolean embedCredentials;
+
+ /**
+ * The ID of the warehouse that can be used to override the warehouse which was set in the draft.
+ */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public PublishRequest setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ public PublishRequest setEmbedCredentials(Boolean embedCredentials) {
+ this.embedCredentials = embedCredentials;
+ return this;
+ }
+
+ public Boolean getEmbedCredentials() {
+ return embedCredentials;
+ }
+
+ public PublishRequest setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PublishRequest that = (PublishRequest) o;
+ return Objects.equals(dashboardId, that.dashboardId)
+ && Objects.equals(embedCredentials, that.embedCredentials)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardId, embedCredentials, warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PublishRequest.class)
+ .add("dashboardId", dashboardId)
+ .add("embedCredentials", embedCredentials)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UserSchema.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UserSchema.java
index 23056e6f6..f8e350626 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UserSchema.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UserSchema.java
@@ -9,4 +9,7 @@
public enum UserSchema {
@JsonProperty("urn:ietf:params:scim:schemas:core:2.0:User")
URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_USER,
+
+ @JsonProperty("urn:ietf:params:scim:schemas:extension:workspace:2.0:User")
+ URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
index 3d3d7f5d0..c731a3fb2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
@@ -306,10 +306,10 @@ public void reset(long jobId, JobSettings newSettings) {
}
/**
- * Overwrite all settings for a job.
+ * Update all job settings (reset).
*
- * <p>Overwrite all settings for the given job. Use the Update endpoint to update job settings
- * partially.
+ * <p>Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update)
+ * to update job settings partially.
*/
public void reset(ResetJob request) {
impl.reset(request);
@@ -365,10 +365,10 @@ public void update(long jobId) {
}
/**
- * Partially update a job.
+ * Update job settings partially.
*
- * <p>Add, update, or remove specific settings of an existing job. Use the ResetJob to overwrite
- * all job settings.
+ * <p>Add, update, or remove specific settings of an existing job. Use the [_Reset_
+ * endpoint](:method:jobs/reset) to overwrite all job settings.
*/
public void update(UpdateJob request) {
impl.update(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
index 6b14958ec..57433b9a8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
@@ -139,10 +139,10 @@ GetJobPermissionLevelsResponse getPermissionLevels(
RepairRunResponse repairRun(RepairRun repairRun);
/**
- * Overwrite all settings for a job.
+ * Update all job settings (reset).
*
- * <p>Overwrite all settings for the given job. Use the Update endpoint to update job settings
- * partially.
+ * <p>Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update)
+ * to update job settings partially.
*/
void reset(ResetJob resetJob);
@@ -170,10 +170,10 @@ GetJobPermissionLevelsResponse getPermissionLevels(
SubmitRunResponse submit(SubmitRun submitRun);
/**
- * Partially update a job.
+ * Update job settings partially.
*
- * <p>Add, update, or remove specific settings of an existing job. Use the ResetJob to overwrite
- * all job settings.
+ * <p>Add, update, or remove specific settings of an existing job. Use the [_Reset_
+ * endpoint](:method:jobs/reset) to overwrite all job settings.
*/
void update(UpdateJob updateJob);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java
index 0acfe0631..79f4feea2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java
@@ -16,7 +16,7 @@ public class CreateTokenRequest {
/**
* The lifetime of the token, in seconds.
*
- * <p>If the ifetime is not specified, this token remains valid indefinitely.
+ * <p>If the lifetime is not specified, this token remains valid indefinitely.
*/
@JsonProperty("lifetime_seconds")
private Long lifetimeSeconds;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java
index a101054b5..9e56fff99 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java
@@ -9,7 +9,7 @@
/** Delete access list */
@Generated
public class DeleteAccountIpAccessListRequest {
- /** */
+ /** The ID for the corresponding IP access list */
private String ipAccessListId;
public DeleteAccountIpAccessListRequest setIpAccessListId(String ipAccessListId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java
index 9135f52a2..5ffa5f734 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java
@@ -9,7 +9,7 @@
/** Delete access list */
@Generated
public class DeleteIpAccessListRequest {
- /** The ID for the corresponding IP access list to modify */
+ /** The ID for the corresponding IP access list */
private String ipAccessListId;
public DeleteIpAccessListRequest setIpAccessListId(String ipAccessListId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java
index 7d0820c04..b35401394 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java
@@ -9,7 +9,7 @@
/** Get IP access list */
@Generated
public class GetAccountIpAccessListRequest {
- /** */
+ /** The ID for the corresponding IP access list */
private String ipAccessListId;
public GetAccountIpAccessListRequest setIpAccessListId(String ipAccessListId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java
index 54056fc88..f4efb6898 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java
@@ -9,7 +9,7 @@
/** Get access list */
@Generated
public class GetIpAccessListRequest {
- /** The ID for the corresponding IP access list to modify */
+ /** The ID for the corresponding IP access list */
private String ipAccessListId;
public GetIpAccessListRequest setIpAccessListId(String ipAccessListId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java
new file mode 100755
index 000000000..445c7789f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListPublicTokensResponse {
+ /** The information for each token. */
+ @JsonProperty("token_infos")
+ private Collection<PublicTokenInfo> tokenInfos;
+
+ public ListPublicTokensResponse setTokenInfos(Collection<PublicTokenInfo> tokenInfos) {
+ this.tokenInfos = tokenInfos;
+ return this;
+ }
+
+ public Collection<PublicTokenInfo> getTokenInfos() {
+ return tokenInfos;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListPublicTokensResponse that = (ListPublicTokensResponse) o;
+ return Objects.equals(tokenInfos, that.tokenInfos);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(tokenInfos);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListPublicTokensResponse.class).add("tokenInfos", tokenInfos).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java
index ac6fa8392..304953e8f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java
@@ -15,7 +15,7 @@ public class ReplaceIpAccessList {
@JsonProperty("enabled")
private Boolean enabled;
- /** The ID for the corresponding IP access list to modify */
+ /** The ID for the corresponding IP access list */
private String ipAccessListId;
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java
index c65f75efe..8058491e5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java
@@ -58,7 +58,7 @@ public void delete(RevokeTokenRequest request) {
*
 * <p>Lists all the valid tokens for a user-workspace pair.
*/
- public Iterable<TokenInfo> list() {
+ public Iterable<PublicTokenInfo> list() {
return impl.list().getTokenInfos();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java
index 44019a6a8..58ea13a2e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java
@@ -34,10 +34,10 @@ public void delete(RevokeTokenRequest request) {
}
@Override
- public ListTokensResponse list() {
+ public ListPublicTokensResponse list() {
String path = "/api/2.0/token/list";
 Map<String, String> headers = new HashMap<>();
headers.put("Accept", "application/json");
- return apiClient.GET(path, ListTokensResponse.class, headers);
+ return apiClient.GET(path, ListPublicTokensResponse.class, headers);
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java
index b10c451fd..3de7c4d2e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java
@@ -37,5 +37,5 @@ public interface TokensService {
*
 * <p>Lists all the valid tokens for a user-workspace pair.
*/
- ListTokensResponse list();
+ ListPublicTokensResponse list();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java
index 32586c1ef..4dca9ed0e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java
@@ -15,7 +15,7 @@ public class UpdateIpAccessList {
@JsonProperty("enabled")
private Boolean enabled;
- /** The ID for the corresponding IP access list to modify */
+ /** The ID for the corresponding IP access list */
private String ipAccessListId;
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java
index 6dfeb1c05..4a767ab47 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java
@@ -10,11 +10,17 @@
/** List clean rooms */
@Generated
public class ListCleanRoomsRequest {
- /** Maximum number of clean rooms to return. */
+ /**
+ * Maximum number of clean rooms to return. If not set, all the clean rooms are returned (not
+ * recommended). - when set to a value greater than 0, the page length is the minimum of this
+ * value and a server configured value; - when set to 0, the page length is set to a server
+ * configured value (recommended); - when set to a value less than 0, an invalid parameter error
+ * is returned;
+ */
@QueryParam("max_results")
private Long maxResults;
- /** Pagination token to go to next page based on previous query. */
+ /** Opaque pagination token to go to next page based on previous query. */
@QueryParam("page_token")
private String pageToken;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java
index 3847f8a28..cadcaa6fe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java
@@ -14,7 +14,10 @@ public class ListCleanRoomsResponse {
@JsonProperty("clean_rooms")
 private Collection<CleanRoomInfo> cleanRooms;
- /** Token to retrieve the next page of results. Absent if there are no more pages. */
+ /**
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * __page_token__ should be set to this value for the next request (for the next page of results).
+ */
@JsonProperty("next_page_token")
private String nextPageToken;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java
index 931e75fbe..7e1cb0289 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java
@@ -14,8 +14,8 @@ public class AccessControl {
private String groupName;
/**
- * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_MANAGE`: Can manage the
- * query
+ * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the
+ * query * `CAN_MANAGE`: Can manage the query
*/
@JsonProperty("permission_level")
private PermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java
index 3b9d4cbb0..8e0f647a1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java
@@ -4,6 +4,7 @@
import com.databricks.sdk.support.Generated;
+/** Name of the channel */
@Generated
public enum ChannelName {
CHANNEL_NAME_CURRENT,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java
index 3163cc7f8..486d579ae 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java
@@ -64,8 +64,8 @@ public class Dashboard {
private String parent;
/**
- * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_MANAGE`: Can manage the
- * query
+ * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the
+ * query * `CAN_MANAGE`: Can manage the query
*/
@JsonProperty("permission_tier")
private PermissionLevel permissionTier;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/PermissionLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/PermissionLevel.java
index 4fb7dfcc6..090131c5d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/PermissionLevel.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/PermissionLevel.java
@@ -5,11 +5,12 @@
import com.databricks.sdk.support.Generated;
/**
- * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_MANAGE`: Can manage the
- * query
+ * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
+ * * `CAN_MANAGE`: Can manage the query
*/
@Generated
public enum PermissionLevel {
+ CAN_EDIT, // Can edit the query
CAN_MANAGE, // Can manage the query
CAN_RUN, // Can run the query
CAN_VIEW, // Can view the query
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java
index 6f0bdc83f..a847c64ae 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java
@@ -95,8 +95,8 @@ public class Query {
private String parent;
/**
- * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_MANAGE`: Can manage the
- * query
+ * * `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the
+ * query * `CAN_MANAGE`: Can manage the query
*/
@JsonProperty("permission_tier")
private PermissionLevel permissionTier;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java
new file mode 100755
index 000000000..e403d7e5a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ColumnInfo {
+ /** Name of the column. */
+ @JsonProperty("name")
+ private String name;
+
+ public ColumnInfo setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ColumnInfo that = (ColumnInfo) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ColumnInfo.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java
new file mode 100755
index 000000000..5e6ab7a73
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateEndpoint {
+ /** Type of endpoint. */
+ @JsonProperty("endpoint_type")
+ private EndpointType endpointType;
+
+ /** Name of endpoint */
+ @JsonProperty("name")
+ private String name;
+
+ public CreateEndpoint setEndpointType(EndpointType endpointType) {
+ this.endpointType = endpointType;
+ return this;
+ }
+
+ public EndpointType getEndpointType() {
+ return endpointType;
+ }
+
+ public CreateEndpoint setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateEndpoint that = (CreateEndpoint) o;
+ return Objects.equals(endpointType, that.endpointType) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(endpointType, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateEndpoint.class)
+ .add("endpointType", endpointType)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java
new file mode 100755
index 000000000..b0ac45ae8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java
@@ -0,0 +1,129 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateVectorIndexRequest {
+ /** Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`. */
+ @JsonProperty("delta_sync_vector_index_spec")
+ private DeltaSyncVectorIndexSpecRequest deltaSyncVectorIndexSpec;
+
+ /** Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`. */
+ @JsonProperty("direct_access_index_spec")
+ private DirectAccessVectorIndexSpec directAccessIndexSpec;
+
+ /** Name of the endpoint to be used for serving the index */
+ @JsonProperty("endpoint_name")
+ private String endpointName;
+
+ /**
+ * There are 2 types of Vector Search indexes:
+ *
+ * <p>- `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically
+ * and incrementally updating the index as the underlying data in the Delta Table changes. -
+ * `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through
+ * our REST and SDK APIs. With this model, the user manages index updates.
+ */
+ @JsonProperty("index_type")
+ private VectorIndexType indexType;
+
+ /** Name of the index */
+ @JsonProperty("name")
+ private String name;
+
+ /** Primary key of the index */
+ @JsonProperty("primary_key")
+ private String primaryKey;
+
+ public CreateVectorIndexRequest setDeltaSyncVectorIndexSpec(
+ DeltaSyncVectorIndexSpecRequest deltaSyncVectorIndexSpec) {
+ this.deltaSyncVectorIndexSpec = deltaSyncVectorIndexSpec;
+ return this;
+ }
+
+ public DeltaSyncVectorIndexSpecRequest getDeltaSyncVectorIndexSpec() {
+ return deltaSyncVectorIndexSpec;
+ }
+
+ public CreateVectorIndexRequest setDirectAccessIndexSpec(
+ DirectAccessVectorIndexSpec directAccessIndexSpec) {
+ this.directAccessIndexSpec = directAccessIndexSpec;
+ return this;
+ }
+
+ public DirectAccessVectorIndexSpec getDirectAccessIndexSpec() {
+ return directAccessIndexSpec;
+ }
+
+ public CreateVectorIndexRequest setEndpointName(String endpointName) {
+ this.endpointName = endpointName;
+ return this;
+ }
+
+ public String getEndpointName() {
+ return endpointName;
+ }
+
+ public CreateVectorIndexRequest setIndexType(VectorIndexType indexType) {
+ this.indexType = indexType;
+ return this;
+ }
+
+ public VectorIndexType getIndexType() {
+ return indexType;
+ }
+
+ public CreateVectorIndexRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public CreateVectorIndexRequest setPrimaryKey(String primaryKey) {
+ this.primaryKey = primaryKey;
+ return this;
+ }
+
+ public String getPrimaryKey() {
+ return primaryKey;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateVectorIndexRequest that = (CreateVectorIndexRequest) o;
+ return Objects.equals(deltaSyncVectorIndexSpec, that.deltaSyncVectorIndexSpec)
+ && Objects.equals(directAccessIndexSpec, that.directAccessIndexSpec)
+ && Objects.equals(endpointName, that.endpointName)
+ && Objects.equals(indexType, that.indexType)
+ && Objects.equals(name, that.name)
+ && Objects.equals(primaryKey, that.primaryKey);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ deltaSyncVectorIndexSpec, directAccessIndexSpec, endpointName, indexType, name, primaryKey);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateVectorIndexRequest.class)
+ .add("deltaSyncVectorIndexSpec", deltaSyncVectorIndexSpec)
+ .add("directAccessIndexSpec", directAccessIndexSpec)
+ .add("endpointName", endpointName)
+ .add("indexType", indexType)
+ .add("name", name)
+ .add("primaryKey", primaryKey)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexResponse.java
new file mode 100755
index 000000000..cc483c43c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexResponse.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateVectorIndexResponse {
+ /** */
+ @JsonProperty("vector_index")
+ private VectorIndex vectorIndex;
+
+ public CreateVectorIndexResponse setVectorIndex(VectorIndex vectorIndex) {
+ this.vectorIndex = vectorIndex;
+ return this;
+ }
+
+ public VectorIndex getVectorIndex() {
+ return vectorIndex;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateVectorIndexResponse that = (CreateVectorIndexResponse) o;
+ return Objects.equals(vectorIndex, that.vectorIndex);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(vectorIndex);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateVectorIndexResponse.class)
+ .add("vectorIndex", vectorIndex)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResult.java
new file mode 100755
index 000000000..00df1c370
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataResult.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Result of the upsert or delete operation. */
+@Generated
+public class DeleteDataResult {
+ /** List of primary keys for rows that failed to process. */
+ @JsonProperty("failed_primary_keys")
+ private Collection<String> failedPrimaryKeys;
+
+ /** Count of successfully processed rows. */
+ @JsonProperty("success_row_count")
+ private Long successRowCount;
+
+ public DeleteDataResult setFailedPrimaryKeys(Collection<String> failedPrimaryKeys) {
+ this.failedPrimaryKeys = failedPrimaryKeys;
+ return this;
+ }
+
+ public Collection<String> getFailedPrimaryKeys() {
+ return failedPrimaryKeys;
+ }
+
+ public DeleteDataResult setSuccessRowCount(Long successRowCount) {
+ this.successRowCount = successRowCount;
+ return this;
+ }
+
+ public Long getSuccessRowCount() {
+ return successRowCount;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDataResult that = (DeleteDataResult) o;
+ return Objects.equals(failedPrimaryKeys, that.failedPrimaryKeys)
+ && Objects.equals(successRowCount, that.successRowCount);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(failedPrimaryKeys, successRowCount);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDataResult.class)
+ .add("failedPrimaryKeys", failedPrimaryKeys)
+ .add("successRowCount", successRowCount)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataStatus.java
new file mode 100755
index 000000000..dcde6f693
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataStatus.java
@@ -0,0 +1,13 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+
+/** Status of the delete operation. */
+@Generated
+public enum DeleteDataStatus {
+ FAILURE,
+ PARTIAL_SUCCESS,
+ SUCCESS,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java
new file mode 100755
index 000000000..281799d92
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Request payload for deleting data from a vector index. */
+@Generated
+public class DeleteDataVectorIndexRequest {
+ /** Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index. */
+ private String name;
+
+ /** List of primary keys for the data to be deleted. */
+ @JsonProperty("primary_keys")
+  private Collection<String> primaryKeys;
+
+ public DeleteDataVectorIndexRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+  public DeleteDataVectorIndexRequest setPrimaryKeys(Collection<String> primaryKeys) {
+ this.primaryKeys = primaryKeys;
+ return this;
+ }
+
+  public Collection<String> getPrimaryKeys() {
+ return primaryKeys;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDataVectorIndexRequest that = (DeleteDataVectorIndexRequest) o;
+ return Objects.equals(name, that.name) && Objects.equals(primaryKeys, that.primaryKeys);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, primaryKeys);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDataVectorIndexRequest.class)
+ .add("name", name)
+ .add("primaryKeys", primaryKeys)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java
new file mode 100755
index 000000000..bed9d9e1d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Response to a delete data vector index request. */
+@Generated
+public class DeleteDataVectorIndexResponse {
+ /** Result of the upsert or delete operation. */
+ @JsonProperty("result")
+ private DeleteDataResult result;
+
+ /** Status of the delete operation. */
+ @JsonProperty("status")
+ private DeleteDataStatus status;
+
+ public DeleteDataVectorIndexResponse setResult(DeleteDataResult result) {
+ this.result = result;
+ return this;
+ }
+
+ public DeleteDataResult getResult() {
+ return result;
+ }
+
+ public DeleteDataVectorIndexResponse setStatus(DeleteDataStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public DeleteDataStatus getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDataVectorIndexResponse that = (DeleteDataVectorIndexResponse) o;
+ return Objects.equals(result, that.result) && Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(result, status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDataVectorIndexResponse.class)
+ .add("result", result)
+ .add("status", status)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java
new file mode 100755
index 000000000..13a48964d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Delete an endpoint */
+@Generated
+public class DeleteEndpointRequest {
+ /** Name of the endpoint */
+ private String endpointName;
+
+ /** Name of the endpoint to delete */
+ private String name;
+
+ public DeleteEndpointRequest setEndpointName(String endpointName) {
+ this.endpointName = endpointName;
+ return this;
+ }
+
+ public String getEndpointName() {
+ return endpointName;
+ }
+
+ public DeleteEndpointRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteEndpointRequest that = (DeleteEndpointRequest) o;
+ return Objects.equals(endpointName, that.endpointName) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(endpointName, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteEndpointRequest.class)
+ .add("endpointName", endpointName)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java
new file mode 100755
index 000000000..b3c15fe0a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Delete an index */
+@Generated
+public class DeleteIndexRequest {
+ /** Name of the index */
+ private String indexName;
+
+ public DeleteIndexRequest setIndexName(String indexName) {
+ this.indexName = indexName;
+ return this;
+ }
+
+ public String getIndexName() {
+ return indexName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteIndexRequest that = (DeleteIndexRequest) o;
+ return Objects.equals(indexName, that.indexName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(indexName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteIndexRequest.class).add("indexName", indexName).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java
new file mode 100755
index 000000000..df8771846
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java
@@ -0,0 +1,100 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class DeltaSyncVectorIndexSpecRequest {
+ /** The columns that contain the embedding source. */
+ @JsonProperty("embedding_source_columns")
+  private Collection<EmbeddingSourceColumn> embeddingSourceColumns;
+
+ /** The columns that contain the embedding vectors. */
+ @JsonProperty("embedding_vector_columns")
+  private Collection<EmbeddingVectorColumn> embeddingVectorColumns;
+
+ /**
+ * Pipeline execution mode.
+ *
+ *
+   * <p>- `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops
+ * processing after successfully refreshing the source table in the pipeline once, ensuring the
+ * table is updated based on the data available when the update started. - `CONTINUOUS`: If the
+ * pipeline uses continuous execution, the pipeline processes new data as it arrives in the source
+ * table to keep vector index fresh.
+ */
+ @JsonProperty("pipeline_type")
+ private PipelineType pipelineType;
+
+ /** The name of the source table. */
+ @JsonProperty("source_table")
+ private String sourceTable;
+
+  public DeltaSyncVectorIndexSpecRequest setEmbeddingSourceColumns(
+      Collection<EmbeddingSourceColumn> embeddingSourceColumns) {
+ this.embeddingSourceColumns = embeddingSourceColumns;
+ return this;
+ }
+
+  public Collection<EmbeddingSourceColumn> getEmbeddingSourceColumns() {
+ return embeddingSourceColumns;
+ }
+
+  public DeltaSyncVectorIndexSpecRequest setEmbeddingVectorColumns(
+      Collection<EmbeddingVectorColumn> embeddingVectorColumns) {
+ this.embeddingVectorColumns = embeddingVectorColumns;
+ return this;
+ }
+
+  public Collection<EmbeddingVectorColumn> getEmbeddingVectorColumns() {
+ return embeddingVectorColumns;
+ }
+
+ public DeltaSyncVectorIndexSpecRequest setPipelineType(PipelineType pipelineType) {
+ this.pipelineType = pipelineType;
+ return this;
+ }
+
+ public PipelineType getPipelineType() {
+ return pipelineType;
+ }
+
+ public DeltaSyncVectorIndexSpecRequest setSourceTable(String sourceTable) {
+ this.sourceTable = sourceTable;
+ return this;
+ }
+
+ public String getSourceTable() {
+ return sourceTable;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeltaSyncVectorIndexSpecRequest that = (DeltaSyncVectorIndexSpecRequest) o;
+ return Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns)
+ && Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns)
+ && Objects.equals(pipelineType, that.pipelineType)
+ && Objects.equals(sourceTable, that.sourceTable);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(embeddingSourceColumns, embeddingVectorColumns, pipelineType, sourceTable);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeltaSyncVectorIndexSpecRequest.class)
+ .add("embeddingSourceColumns", embeddingSourceColumns)
+ .add("embeddingVectorColumns", embeddingVectorColumns)
+ .add("pipelineType", pipelineType)
+ .add("sourceTable", sourceTable)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java
new file mode 100755
index 000000000..046193b0a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java
@@ -0,0 +1,116 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class DeltaSyncVectorIndexSpecResponse {
+ /** The columns that contain the embedding source. */
+ @JsonProperty("embedding_source_columns")
+  private Collection<EmbeddingSourceColumn> embeddingSourceColumns;
+
+ /** The columns that contain the embedding vectors. */
+ @JsonProperty("embedding_vector_columns")
+  private Collection<EmbeddingVectorColumn> embeddingVectorColumns;
+
+ /** The ID of the pipeline that is used to sync the index. */
+ @JsonProperty("pipeline_id")
+ private String pipelineId;
+
+ /**
+ * Pipeline execution mode.
+ *
+ *
+   * <p>- `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops
+ * processing after successfully refreshing the source table in the pipeline once, ensuring the
+ * table is updated based on the data available when the update started. - `CONTINUOUS`: If the
+ * pipeline uses continuous execution, the pipeline processes new data as it arrives in the source
+ * table to keep vector index fresh.
+ */
+ @JsonProperty("pipeline_type")
+ private PipelineType pipelineType;
+
+ /** The name of the source table. */
+ @JsonProperty("source_table")
+ private String sourceTable;
+
+  public DeltaSyncVectorIndexSpecResponse setEmbeddingSourceColumns(
+      Collection<EmbeddingSourceColumn> embeddingSourceColumns) {
+ this.embeddingSourceColumns = embeddingSourceColumns;
+ return this;
+ }
+
+  public Collection<EmbeddingSourceColumn> getEmbeddingSourceColumns() {
+ return embeddingSourceColumns;
+ }
+
+  public DeltaSyncVectorIndexSpecResponse setEmbeddingVectorColumns(
+      Collection<EmbeddingVectorColumn> embeddingVectorColumns) {
+ this.embeddingVectorColumns = embeddingVectorColumns;
+ return this;
+ }
+
+  public Collection<EmbeddingVectorColumn> getEmbeddingVectorColumns() {
+ return embeddingVectorColumns;
+ }
+
+ public DeltaSyncVectorIndexSpecResponse setPipelineId(String pipelineId) {
+ this.pipelineId = pipelineId;
+ return this;
+ }
+
+ public String getPipelineId() {
+ return pipelineId;
+ }
+
+ public DeltaSyncVectorIndexSpecResponse setPipelineType(PipelineType pipelineType) {
+ this.pipelineType = pipelineType;
+ return this;
+ }
+
+ public PipelineType getPipelineType() {
+ return pipelineType;
+ }
+
+ public DeltaSyncVectorIndexSpecResponse setSourceTable(String sourceTable) {
+ this.sourceTable = sourceTable;
+ return this;
+ }
+
+ public String getSourceTable() {
+ return sourceTable;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeltaSyncVectorIndexSpecResponse that = (DeltaSyncVectorIndexSpecResponse) o;
+ return Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns)
+ && Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns)
+ && Objects.equals(pipelineId, that.pipelineId)
+ && Objects.equals(pipelineType, that.pipelineType)
+ && Objects.equals(sourceTable, that.sourceTable);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ embeddingSourceColumns, embeddingVectorColumns, pipelineId, pipelineType, sourceTable);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeltaSyncVectorIndexSpecResponse.class)
+ .add("embeddingSourceColumns", embeddingSourceColumns)
+ .add("embeddingVectorColumns", embeddingVectorColumns)
+ .add("pipelineId", pipelineId)
+ .add("pipelineType", pipelineType)
+ .add("sourceTable", sourceTable)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java
new file mode 100755
index 000000000..d2e5886ab
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java
@@ -0,0 +1,68 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class DirectAccessVectorIndexSpec {
+ /** */
+ @JsonProperty("embedding_vector_columns")
+  private Collection<EmbeddingVectorColumn> embeddingVectorColumns;
+
+ /**
+ * The schema of the index in JSON format.
+ *
+ *
+   * <p>Supported types for vector column: `array<float>`, `array<double>`,`.
+ */
+ @JsonProperty("schema_json")
+ private String schemaJson;
+
+  public DirectAccessVectorIndexSpec setEmbeddingVectorColumns(
+      Collection<EmbeddingVectorColumn> embeddingVectorColumns) {
+ this.embeddingVectorColumns = embeddingVectorColumns;
+ return this;
+ }
+
+  public Collection<EmbeddingVectorColumn> getEmbeddingVectorColumns() {
+ return embeddingVectorColumns;
+ }
+
+ public DirectAccessVectorIndexSpec setSchemaJson(String schemaJson) {
+ this.schemaJson = schemaJson;
+ return this;
+ }
+
+ public String getSchemaJson() {
+ return schemaJson;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DirectAccessVectorIndexSpec that = (DirectAccessVectorIndexSpec) o;
+ return Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns)
+ && Objects.equals(schemaJson, that.schemaJson);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(embeddingVectorColumns, schemaJson);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DirectAccessVectorIndexSpec.class)
+ .add("embeddingVectorColumns", embeddingVectorColumns)
+ .add("schemaJson", schemaJson)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingConfig.java
new file mode 100755
index 000000000..506dfc0d2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingConfig.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class EmbeddingConfig {
+ /** Name of the embedding model endpoint */
+ @JsonProperty("embedding_model_endpoint_name")
+ private String embeddingModelEndpointName;
+
+ public EmbeddingConfig setEmbeddingModelEndpointName(String embeddingModelEndpointName) {
+ this.embeddingModelEndpointName = embeddingModelEndpointName;
+ return this;
+ }
+
+ public String getEmbeddingModelEndpointName() {
+ return embeddingModelEndpointName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EmbeddingConfig that = (EmbeddingConfig) o;
+ return Objects.equals(embeddingModelEndpointName, that.embeddingModelEndpointName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(embeddingModelEndpointName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(EmbeddingConfig.class)
+ .add("embeddingModelEndpointName", embeddingModelEndpointName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumn.java
new file mode 100755
index 000000000..090998154
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingSourceColumn.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class EmbeddingSourceColumn {
+ /** */
+ @JsonProperty("embedding_config")
+ private EmbeddingConfig embeddingConfig;
+
+ /** Name of the column */
+ @JsonProperty("name")
+ private String name;
+
+ public EmbeddingSourceColumn setEmbeddingConfig(EmbeddingConfig embeddingConfig) {
+ this.embeddingConfig = embeddingConfig;
+ return this;
+ }
+
+ public EmbeddingConfig getEmbeddingConfig() {
+ return embeddingConfig;
+ }
+
+ public EmbeddingSourceColumn setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EmbeddingSourceColumn that = (EmbeddingSourceColumn) o;
+ return Objects.equals(embeddingConfig, that.embeddingConfig) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(embeddingConfig, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(EmbeddingSourceColumn.class)
+ .add("embeddingConfig", embeddingConfig)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumn.java
new file mode 100755
index 000000000..dba295871
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EmbeddingVectorColumn.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class EmbeddingVectorColumn {
+ /** Dimension of the embedding vector */
+ @JsonProperty("embedding_dimension")
+ private Long embeddingDimension;
+
+ /** Name of the column */
+ @JsonProperty("name")
+ private String name;
+
+ public EmbeddingVectorColumn setEmbeddingDimension(Long embeddingDimension) {
+ this.embeddingDimension = embeddingDimension;
+ return this;
+ }
+
+ public Long getEmbeddingDimension() {
+ return embeddingDimension;
+ }
+
+ public EmbeddingVectorColumn setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EmbeddingVectorColumn that = (EmbeddingVectorColumn) o;
+ return Objects.equals(embeddingDimension, that.embeddingDimension)
+ && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(embeddingDimension, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(EmbeddingVectorColumn.class)
+ .add("embeddingDimension", embeddingDimension)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java
new file mode 100755
index 000000000..3afdad7fb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java
@@ -0,0 +1,173 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class EndpointInfo {
+ /** Timestamp of endpoint creation */
+ @JsonProperty("creation_timestamp")
+ private Long creationTimestamp;
+
+ /** Creator of the endpoint */
+ @JsonProperty("creator")
+ private String creator;
+
+ /** Current status of the endpoint */
+ @JsonProperty("endpoint_status")
+ private EndpointStatus endpointStatus;
+
+ /** Type of endpoint. */
+ @JsonProperty("endpoint_type")
+ private EndpointType endpointType;
+
+ /** Unique identifier of the endpoint */
+ @JsonProperty("id")
+ private String id;
+
+ /** Timestamp of last update to the endpoint */
+ @JsonProperty("last_updated_timestamp")
+ private Long lastUpdatedTimestamp;
+
+ /** User who last updated the endpoint */
+ @JsonProperty("last_updated_user")
+ private String lastUpdatedUser;
+
+ /** Name of endpoint */
+ @JsonProperty("name")
+ private String name;
+
+ /** Number of indexes on the endpoint */
+ @JsonProperty("num_indexes")
+ private Long numIndexes;
+
+ public EndpointInfo setCreationTimestamp(Long creationTimestamp) {
+ this.creationTimestamp = creationTimestamp;
+ return this;
+ }
+
+ public Long getCreationTimestamp() {
+ return creationTimestamp;
+ }
+
+ public EndpointInfo setCreator(String creator) {
+ this.creator = creator;
+ return this;
+ }
+
+ public String getCreator() {
+ return creator;
+ }
+
+ public EndpointInfo setEndpointStatus(EndpointStatus endpointStatus) {
+ this.endpointStatus = endpointStatus;
+ return this;
+ }
+
+ public EndpointStatus getEndpointStatus() {
+ return endpointStatus;
+ }
+
+ public EndpointInfo setEndpointType(EndpointType endpointType) {
+ this.endpointType = endpointType;
+ return this;
+ }
+
+ public EndpointType getEndpointType() {
+ return endpointType;
+ }
+
+ public EndpointInfo setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public EndpointInfo setLastUpdatedTimestamp(Long lastUpdatedTimestamp) {
+ this.lastUpdatedTimestamp = lastUpdatedTimestamp;
+ return this;
+ }
+
+ public Long getLastUpdatedTimestamp() {
+ return lastUpdatedTimestamp;
+ }
+
+ public EndpointInfo setLastUpdatedUser(String lastUpdatedUser) {
+ this.lastUpdatedUser = lastUpdatedUser;
+ return this;
+ }
+
+ public String getLastUpdatedUser() {
+ return lastUpdatedUser;
+ }
+
+ public EndpointInfo setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public EndpointInfo setNumIndexes(Long numIndexes) {
+ this.numIndexes = numIndexes;
+ return this;
+ }
+
+ public Long getNumIndexes() {
+ return numIndexes;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EndpointInfo that = (EndpointInfo) o;
+ return Objects.equals(creationTimestamp, that.creationTimestamp)
+ && Objects.equals(creator, that.creator)
+ && Objects.equals(endpointStatus, that.endpointStatus)
+ && Objects.equals(endpointType, that.endpointType)
+ && Objects.equals(id, that.id)
+ && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp)
+ && Objects.equals(lastUpdatedUser, that.lastUpdatedUser)
+ && Objects.equals(name, that.name)
+ && Objects.equals(numIndexes, that.numIndexes);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ creationTimestamp,
+ creator,
+ endpointStatus,
+ endpointType,
+ id,
+ lastUpdatedTimestamp,
+ lastUpdatedUser,
+ name,
+ numIndexes);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(EndpointInfo.class)
+ .add("creationTimestamp", creationTimestamp)
+ .add("creator", creator)
+ .add("endpointStatus", endpointStatus)
+ .add("endpointType", endpointType)
+ .add("id", id)
+ .add("lastUpdatedTimestamp", lastUpdatedTimestamp)
+ .add("lastUpdatedUser", lastUpdatedUser)
+ .add("name", name)
+ .add("numIndexes", numIndexes)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatus.java
new file mode 100755
index 000000000..7b452f74a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatus.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Status information of an endpoint */
+@Generated
+public class EndpointStatus {
+ /** Additional status message */
+ @JsonProperty("message")
+ private String message;
+
+ /** Current state of the endpoint */
+ @JsonProperty("state")
+ private EndpointStatusState state;
+
+ public EndpointStatus setMessage(String message) {
+ this.message = message;
+ return this;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public EndpointStatus setState(EndpointStatusState state) {
+ this.state = state;
+ return this;
+ }
+
+ public EndpointStatusState getState() {
+ return state;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EndpointStatus that = (EndpointStatus) o;
+ return Objects.equals(message, that.message) && Objects.equals(state, that.state);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(message, state);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(EndpointStatus.class)
+ .add("message", message)
+ .add("state", state)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java
new file mode 100755
index 000000000..6356c17ed
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java
@@ -0,0 +1,13 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+
+/** Current state of the endpoint */
+@Generated
+public enum EndpointStatusState {
+ OFFLINE,
+ ONLINE,
+ PROVISIONING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointType.java
new file mode 100755
index 000000000..a02f073f2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointType.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+
+/** Type of endpoint. */
+@Generated
+public enum EndpointType {
+ STANDARD,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequest.java
new file mode 100755
index 000000000..b0f9189a4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Get an endpoint */
+@Generated
+public class GetEndpointRequest {
+ /** Name of the endpoint */
+ private String endpointName;
+
+ public GetEndpointRequest setEndpointName(String endpointName) {
+ this.endpointName = endpointName;
+ return this;
+ }
+
+ public String getEndpointName() {
+ return endpointName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetEndpointRequest that = (GetEndpointRequest) o;
+ return Objects.equals(endpointName, that.endpointName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(endpointName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetEndpointRequest.class).add("endpointName", endpointName).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java
new file mode 100755
index 000000000..76869386f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Get an index */
+@Generated
+public class GetIndexRequest {
+ /** Name of the index */
+ private String indexName;
+
+ public GetIndexRequest setIndexName(String indexName) {
+ this.indexName = indexName;
+ return this;
+ }
+
+ public String getIndexName() {
+ return indexName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetIndexRequest that = (GetIndexRequest) o;
+ return Objects.equals(indexName, that.indexName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(indexName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetIndexRequest.class).add("indexName", indexName).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponse.java
new file mode 100755
index 000000000..6fd008732
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponse.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListEndpointResponse {
+ /** An array of Endpoint objects */
+ @JsonProperty("endpoints")
+ private Collection<EndpointInfo> endpoints;
+
+ /**
+ * A token that can be used to get the next page of results. If not present, there are no more
+ * results to show.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListEndpointResponse setEndpoints(Collection<EndpointInfo> endpoints) {
+ this.endpoints = endpoints;
+ return this;
+ }
+
+ public Collection<EndpointInfo> getEndpoints() {
+ return endpoints;
+ }
+
+ public ListEndpointResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListEndpointResponse that = (ListEndpointResponse) o;
+ return Objects.equals(endpoints, that.endpoints)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(endpoints, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListEndpointResponse.class)
+ .add("endpoints", endpoints)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequest.java
new file mode 100755
index 000000000..d6bba8b43
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** List all endpoints */
+@Generated
+public class ListEndpointsRequest {
+ /** Token for pagination */
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListEndpointsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListEndpointsRequest that = (ListEndpointsRequest) o;
+ return Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListEndpointsRequest.class).add("pageToken", pageToken).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequest.java
new file mode 100755
index 000000000..4ee34ec13
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequest.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** List indexes */
+@Generated
+public class ListIndexesRequest {
+ /** Name of the endpoint */
+ @QueryParam("endpoint_name")
+ private String endpointName;
+
+ /** Token for pagination */
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListIndexesRequest setEndpointName(String endpointName) {
+ this.endpointName = endpointName;
+ return this;
+ }
+
+ public String getEndpointName() {
+ return endpointName;
+ }
+
+ public ListIndexesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListIndexesRequest that = (ListIndexesRequest) o;
+ return Objects.equals(endpointName, that.endpointName)
+ && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(endpointName, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListIndexesRequest.class)
+ .add("endpointName", endpointName)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponse.java
new file mode 100755
index 000000000..657df2e84
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponse.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListVectorIndexesResponse {
+ /**
+ * A token that can be used to get the next page of results. If not present, there are no more
+ * results to show.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("vector_indexes")
+ private Collection<MiniVectorIndex> vectorIndexes;
+
+ public ListVectorIndexesResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListVectorIndexesResponse setVectorIndexes(Collection<MiniVectorIndex> vectorIndexes) {
+ this.vectorIndexes = vectorIndexes;
+ return this;
+ }
+
+ public Collection<MiniVectorIndex> getVectorIndexes() {
+ return vectorIndexes;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListVectorIndexesResponse that = (ListVectorIndexesResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(vectorIndexes, that.vectorIndexes);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, vectorIndexes);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListVectorIndexesResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("vectorIndexes", vectorIndexes)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java
new file mode 100755
index 000000000..fcfcd4f33
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java
@@ -0,0 +1,111 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class MiniVectorIndex {
+ /** The user who created the index. */
+ @JsonProperty("creator")
+ private String creator;
+
+ /** Name of the endpoint associated with the index */
+ @JsonProperty("endpoint_name")
+ private String endpointName;
+
+ /**
+ * There are 2 types of Vector Search indexes:
+ *
+ * <p>- `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically
+ * and incrementally updating the index as the underlying data in the Delta Table changes. -
+ * `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through
+ * our REST and SDK APIs. With this model, the user manages index updates.
+ */
+ @JsonProperty("index_type")
+ private VectorIndexType indexType;
+
+ /** Name of the index */
+ @JsonProperty("name")
+ private String name;
+
+ /** Primary key of the index */
+ @JsonProperty("primary_key")
+ private String primaryKey;
+
+ public MiniVectorIndex setCreator(String creator) {
+ this.creator = creator;
+ return this;
+ }
+
+ public String getCreator() {
+ return creator;
+ }
+
+ public MiniVectorIndex setEndpointName(String endpointName) {
+ this.endpointName = endpointName;
+ return this;
+ }
+
+ public String getEndpointName() {
+ return endpointName;
+ }
+
+ public MiniVectorIndex setIndexType(VectorIndexType indexType) {
+ this.indexType = indexType;
+ return this;
+ }
+
+ public VectorIndexType getIndexType() {
+ return indexType;
+ }
+
+ public MiniVectorIndex setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public MiniVectorIndex setPrimaryKey(String primaryKey) {
+ this.primaryKey = primaryKey;
+ return this;
+ }
+
+ public String getPrimaryKey() {
+ return primaryKey;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MiniVectorIndex that = (MiniVectorIndex) o;
+ return Objects.equals(creator, that.creator)
+ && Objects.equals(endpointName, that.endpointName)
+ && Objects.equals(indexType, that.indexType)
+ && Objects.equals(name, that.name)
+ && Objects.equals(primaryKey, that.primaryKey);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(creator, endpointName, indexType, name, primaryKey);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(MiniVectorIndex.class)
+ .add("creator", creator)
+ .add("endpointName", endpointName)
+ .add("indexType", indexType)
+ .add("name", name)
+ .add("primaryKey", primaryKey)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PipelineType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PipelineType.java
new file mode 100755
index 000000000..bef62c430
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PipelineType.java
@@ -0,0 +1,25 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Pipeline execution mode.
+ *
+ * <p>- `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing
+ * after successfully refreshing the source table in the pipeline once, ensuring the table is
+ * updated based on the data available when the update started. - `CONTINUOUS`: If the pipeline uses
+ * continuous execution, the pipeline processes new data as it arrives in the source table to keep
+ * vector index fresh.
+ */
+@Generated
+public enum PipelineType {
+ CONTINUOUS, // If the pipeline uses continuous execution, the pipeline processes new data as
+ // it arrives in the source table to keep vector index fresh.
+ TRIGGERED, // If the pipeline uses the triggered execution mode, the system stops
+ // processing after successfully refreshing the source table in the pipeline
+ // once, ensuring the table is updated based on the data available when the
+ // update started.
+
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java
new file mode 100755
index 000000000..16fc8315e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java
@@ -0,0 +1,128 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class QueryVectorIndexRequest {
+ /** List of column names to include in the response. */
+ @JsonProperty("columns")
+ private Collection<String> columns;
+
+ /**
+ * JSON string representing query filters.
+ *
+ * <p>Example filters: - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id
+ * greater than 5. - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter
+ * for id greater than equal to 5. - `{"id": 5}`: Filter for id equal to 5.
+ */
+ @JsonProperty("filters_json")
+ private String filtersJson;
+
+ /** Name of the vector index to query. */
+ private String indexName;
+
+ /** Number of results to return. Defaults to 10. */
+ @JsonProperty("num_results")
+ private Long numResults;
+
+ /** Query text. Required for Delta Sync Index using model endpoint. */
+ @JsonProperty("query_text")
+ private String queryText;
+
+ /**
+ * Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed
+ * vectors.
+ */
+ @JsonProperty("query_vector")
+ private Collection<Number> queryVector;
+
+ public QueryVectorIndexRequest setColumns(Collection<String> columns) {
+ this.columns = columns;
+ return this;
+ }
+
+ public Collection<String> getColumns() {
+ return columns;
+ }
+
+ public QueryVectorIndexRequest setFiltersJson(String filtersJson) {
+ this.filtersJson = filtersJson;
+ return this;
+ }
+
+ public String getFiltersJson() {
+ return filtersJson;
+ }
+
+ public QueryVectorIndexRequest setIndexName(String indexName) {
+ this.indexName = indexName;
+ return this;
+ }
+
+ public String getIndexName() {
+ return indexName;
+ }
+
+ public QueryVectorIndexRequest setNumResults(Long numResults) {
+ this.numResults = numResults;
+ return this;
+ }
+
+ public Long getNumResults() {
+ return numResults;
+ }
+
+ public QueryVectorIndexRequest setQueryText(String queryText) {
+ this.queryText = queryText;
+ return this;
+ }
+
+ public String getQueryText() {
+ return queryText;
+ }
+
+ public QueryVectorIndexRequest setQueryVector(Collection<Number> queryVector) {
+ this.queryVector = queryVector;
+ return this;
+ }
+
+ public Collection<Number> getQueryVector() {
+ return queryVector;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ QueryVectorIndexRequest that = (QueryVectorIndexRequest) o;
+ return Objects.equals(columns, that.columns)
+ && Objects.equals(filtersJson, that.filtersJson)
+ && Objects.equals(indexName, that.indexName)
+ && Objects.equals(numResults, that.numResults)
+ && Objects.equals(queryText, that.queryText)
+ && Objects.equals(queryVector, that.queryVector);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(columns, filtersJson, indexName, numResults, queryText, queryVector);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(QueryVectorIndexRequest.class)
+ .add("columns", columns)
+ .add("filtersJson", filtersJson)
+ .add("indexName", indexName)
+ .add("numResults", numResults)
+ .add("queryText", queryText)
+ .add("queryVector", queryVector)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java
new file mode 100755
index 000000000..64a126c89
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class QueryVectorIndexResponse {
+ /** Metadata about the result set. */
+ @JsonProperty("manifest")
+ private ResultManifest manifest;
+
+ /** Data returned in the query result. */
+ @JsonProperty("result")
+ private ResultData result;
+
+ public QueryVectorIndexResponse setManifest(ResultManifest manifest) {
+ this.manifest = manifest;
+ return this;
+ }
+
+ public ResultManifest getManifest() {
+ return manifest;
+ }
+
+ public QueryVectorIndexResponse setResult(ResultData result) {
+ this.result = result;
+ return this;
+ }
+
+ public ResultData getResult() {
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ QueryVectorIndexResponse that = (QueryVectorIndexResponse) o;
+ return Objects.equals(manifest, that.manifest) && Objects.equals(result, that.result);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(manifest, result);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(QueryVectorIndexResponse.class)
+ .add("manifest", manifest)
+ .add("result", result)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java
new file mode 100755
index 000000000..76b6bf9ac
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Data returned in the query result. */
+@Generated
+public class ResultData {
+ /** Data rows returned in the query. */
+ @JsonProperty("data_array")
+ private Collection<Collection<String>> dataArray;
+
+ /** Number of rows in the result set. */
+ @JsonProperty("row_count")
+ private Long rowCount;
+
+ public ResultData setDataArray(Collection<Collection<String>> dataArray) {
+ this.dataArray = dataArray;
+ return this;
+ }
+
+ public Collection<Collection<String>> getDataArray() {
+ return dataArray;
+ }
+
+ public ResultData setRowCount(Long rowCount) {
+ this.rowCount = rowCount;
+ return this;
+ }
+
+ public Long getRowCount() {
+ return rowCount;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ResultData that = (ResultData) o;
+ return Objects.equals(dataArray, that.dataArray) && Objects.equals(rowCount, that.rowCount);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataArray, rowCount);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ResultData.class)
+ .add("dataArray", dataArray)
+ .add("rowCount", rowCount)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java
new file mode 100755
index 000000000..bf365fb38
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Metadata about the result set. */
+@Generated
+public class ResultManifest {
+ /** Number of columns in the result set. */
+ @JsonProperty("column_count")
+ private Long columnCount;
+
+ /** Information about each column in the result set. */
+ @JsonProperty("columns")
+ private Collection<ColumnInfo> columns;
+
+ public ResultManifest setColumnCount(Long columnCount) {
+ this.columnCount = columnCount;
+ return this;
+ }
+
+ public Long getColumnCount() {
+ return columnCount;
+ }
+
+ public ResultManifest setColumns(Collection<ColumnInfo> columns) {
+ this.columns = columns;
+ return this;
+ }
+
+ public Collection<ColumnInfo> getColumns() {
+ return columns;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ResultManifest that = (ResultManifest) o;
+ return Objects.equals(columnCount, that.columnCount) && Objects.equals(columns, that.columns);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(columnCount, columns);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ResultManifest.class)
+ .add("columnCount", columnCount)
+ .add("columns", columns)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java
new file mode 100755
index 000000000..dbfd2fbbe
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/** Synchronize an index */
+@Generated
+public class SyncIndexRequest {
+ /** Name of the vector index to synchronize. Must be a Delta Sync Index. */
+ private String indexName;
+
+ public SyncIndexRequest setIndexName(String indexName) {
+ this.indexName = indexName;
+ return this;
+ }
+
+ public String getIndexName() {
+ return indexName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SyncIndexRequest that = (SyncIndexRequest) o;
+ return Objects.equals(indexName, that.indexName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(indexName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SyncIndexRequest.class).add("indexName", indexName).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java
new file mode 100755
index 000000000..783c7bc82
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Result of the upsert or delete operation. */
+@Generated
+public class UpsertDataResult {
+ /** List of primary keys for rows that failed to process. */
+ @JsonProperty("failed_primary_keys")
+ private Collection<String> failedPrimaryKeys;
+
+ /** Count of successfully processed rows. */
+ @JsonProperty("success_row_count")
+ private Long successRowCount;
+
+ public UpsertDataResult setFailedPrimaryKeys(Collection<String> failedPrimaryKeys) {
+ this.failedPrimaryKeys = failedPrimaryKeys;
+ return this;
+ }
+
+ public Collection<String> getFailedPrimaryKeys() {
+ return failedPrimaryKeys;
+ }
+
+ public UpsertDataResult setSuccessRowCount(Long successRowCount) {
+ this.successRowCount = successRowCount;
+ return this;
+ }
+
+ public Long getSuccessRowCount() {
+ return successRowCount;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpsertDataResult that = (UpsertDataResult) o;
+ return Objects.equals(failedPrimaryKeys, that.failedPrimaryKeys)
+ && Objects.equals(successRowCount, that.successRowCount);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(failedPrimaryKeys, successRowCount);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpsertDataResult.class)
+ .add("failedPrimaryKeys", failedPrimaryKeys)
+ .add("successRowCount", successRowCount)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataStatus.java
new file mode 100755
index 000000000..9ad28c3da
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataStatus.java
@@ -0,0 +1,13 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+
+/** Status of the upsert operation. */
+@Generated
+public enum UpsertDataStatus {
+ FAILURE,
+ PARTIAL_SUCCESS,
+ SUCCESS,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java
new file mode 100755
index 000000000..bf114b3c0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Request payload for upserting data into a vector index. */
+@Generated
+public class UpsertDataVectorIndexRequest {
+ /** JSON string representing the data to be upserted. */
+ @JsonProperty("inputs_json")
+ private String inputsJson;
+
+ /**
+ * Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index.
+ */
+ private String name;
+
+ public UpsertDataVectorIndexRequest setInputsJson(String inputsJson) {
+ this.inputsJson = inputsJson;
+ return this;
+ }
+
+ public String getInputsJson() {
+ return inputsJson;
+ }
+
+ public UpsertDataVectorIndexRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpsertDataVectorIndexRequest that = (UpsertDataVectorIndexRequest) o;
+ return Objects.equals(inputsJson, that.inputsJson) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(inputsJson, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpsertDataVectorIndexRequest.class)
+ .add("inputsJson", inputsJson)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponse.java
new file mode 100755
index 000000000..65c905c76
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexResponse.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Response to an upsert data vector index request. */
+@Generated
+public class UpsertDataVectorIndexResponse {
+ /** Result of the upsert or delete operation. */
+ @JsonProperty("result")
+ private UpsertDataResult result;
+
+ /** Status of the upsert operation. */
+ @JsonProperty("status")
+ private UpsertDataStatus status;
+
+ public UpsertDataVectorIndexResponse setResult(UpsertDataResult result) {
+ this.result = result;
+ return this;
+ }
+
+ public UpsertDataResult getResult() {
+ return result;
+ }
+
+ public UpsertDataVectorIndexResponse setStatus(UpsertDataStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public UpsertDataStatus getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpsertDataVectorIndexResponse that = (UpsertDataVectorIndexResponse) o;
+ return Objects.equals(result, that.result) && Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(result, status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpsertDataVectorIndexResponse.class)
+ .add("result", result)
+ .add("status", status)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java
new file mode 100755
index 000000000..3488659a0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java
@@ -0,0 +1,166 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.vectorsearch;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Describes a vector search index: its identity, type-specific spec, and current status. */
@Generated
public class VectorIndex {
  /** The user who created the index. */
  @JsonProperty("creator")
  private String creator;

  /** Delta Sync spec; presumably populated only when indexType is `DELTA_SYNC` — confirm. */
  @JsonProperty("delta_sync_vector_index_spec")
  private DeltaSyncVectorIndexSpecResponse deltaSyncVectorIndexSpec;

  /** Direct Access spec; presumably populated only when indexType is `DIRECT_ACCESS` — confirm. */
  @JsonProperty("direct_access_vector_index_spec")
  private DirectAccessVectorIndexSpec directAccessVectorIndexSpec;

  /** Name of the endpoint associated with the index */
  @JsonProperty("endpoint_name")
  private String endpointName;

  /**
   * There are 2 types of Vector Search indexes:
   *
   * <p>- `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically
   * and incrementally updating the index as the underlying data in the Delta Table changes. -
   * `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through
   * our REST and SDK APIs. With this model, the user manages index updates.
   */
  @JsonProperty("index_type")
  private VectorIndexType indexType;

  /** Name of the index */
  @JsonProperty("name")
  private String name;

  /** Primary key of the index */
  @JsonProperty("primary_key")
  private String primaryKey;

  /** Current readiness/progress of the index. */
  @JsonProperty("status")
  private VectorIndexStatus status;

  // Fluent setters below return this so calls can be chained.
  public VectorIndex setCreator(String creator) {
    this.creator = creator;
    return this;
  }

  public String getCreator() {
    return creator;
  }

  public VectorIndex setDeltaSyncVectorIndexSpec(
      DeltaSyncVectorIndexSpecResponse deltaSyncVectorIndexSpec) {
    this.deltaSyncVectorIndexSpec = deltaSyncVectorIndexSpec;
    return this;
  }

  public DeltaSyncVectorIndexSpecResponse getDeltaSyncVectorIndexSpec() {
    return deltaSyncVectorIndexSpec;
  }

  public VectorIndex setDirectAccessVectorIndexSpec(
      DirectAccessVectorIndexSpec directAccessVectorIndexSpec) {
    this.directAccessVectorIndexSpec = directAccessVectorIndexSpec;
    return this;
  }

  public DirectAccessVectorIndexSpec getDirectAccessVectorIndexSpec() {
    return directAccessVectorIndexSpec;
  }

  public VectorIndex setEndpointName(String endpointName) {
    this.endpointName = endpointName;
    return this;
  }

  public String getEndpointName() {
    return endpointName;
  }

  public VectorIndex setIndexType(VectorIndexType indexType) {
    this.indexType = indexType;
    return this;
  }

  public VectorIndexType getIndexType() {
    return indexType;
  }

  public VectorIndex setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public VectorIndex setPrimaryKey(String primaryKey) {
    this.primaryKey = primaryKey;
    return this;
  }

  public String getPrimaryKey() {
    return primaryKey;
  }

  public VectorIndex setStatus(VectorIndexStatus status) {
    this.status = status;
    return this;
  }

  public VectorIndexStatus getStatus() {
    return status;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    VectorIndex that = (VectorIndex) o;
    return Objects.equals(creator, that.creator)
        && Objects.equals(deltaSyncVectorIndexSpec, that.deltaSyncVectorIndexSpec)
        && Objects.equals(directAccessVectorIndexSpec, that.directAccessVectorIndexSpec)
        && Objects.equals(endpointName, that.endpointName)
        && Objects.equals(indexType, that.indexType)
        && Objects.equals(name, that.name)
        && Objects.equals(primaryKey, that.primaryKey)
        && Objects.equals(status, that.status);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        creator,
        deltaSyncVectorIndexSpec,
        directAccessVectorIndexSpec,
        endpointName,
        indexType,
        name,
        primaryKey,
        status);
  }

  @Override
  public String toString() {
    return new ToStringer(VectorIndex.class)
        .add("creator", creator)
        .add("deltaSyncVectorIndexSpec", deltaSyncVectorIndexSpec)
        .add("directAccessVectorIndexSpec", directAccessVectorIndexSpec)
        .add("endpointName", endpointName)
        .add("indexType", indexType)
        .add("name", name)
        .add("primaryKey", primaryKey)
        .add("status", status)
        .toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatus.java
new file mode 100755
index 000000000..83868a47b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexStatus.java
@@ -0,0 +1,89 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.vectorsearch;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Readiness and indexing progress reported for a vector index. */
@Generated
public class VectorIndexStatus {
  /** Index API Url to be used to perform operations on the index */
  @JsonProperty("index_url")
  private String indexUrl;

  /** Number of rows indexed */
  @JsonProperty("indexed_row_count")
  private Long indexedRowCount;

  /** Message associated with the index status */
  @JsonProperty("message")
  private String message;

  /** Whether the index is ready for search */
  @JsonProperty("ready")
  private Boolean ready;

  // Fluent setters below return this so calls can be chained.
  public VectorIndexStatus setIndexUrl(String indexUrl) {
    this.indexUrl = indexUrl;
    return this;
  }

  public String getIndexUrl() {
    return indexUrl;
  }

  public VectorIndexStatus setIndexedRowCount(Long indexedRowCount) {
    this.indexedRowCount = indexedRowCount;
    return this;
  }

  public Long getIndexedRowCount() {
    return indexedRowCount;
  }

  public VectorIndexStatus setMessage(String message) {
    this.message = message;
    return this;
  }

  public String getMessage() {
    return message;
  }

  public VectorIndexStatus setReady(Boolean ready) {
    this.ready = ready;
    return this;
  }

  public Boolean getReady() {
    return ready;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    VectorIndexStatus other = (VectorIndexStatus) o;
    return Objects.equals(indexUrl, other.indexUrl)
        && Objects.equals(indexedRowCount, other.indexedRowCount)
        && Objects.equals(message, other.message)
        && Objects.equals(ready, other.ready);
  }

  @Override
  public int hashCode() {
    return Objects.hash(indexUrl, indexedRowCount, message, ready);
  }

  @Override
  public String toString() {
    return new ToStringer(VectorIndexStatus.class)
        .add("indexUrl", indexUrl)
        .add("indexedRowCount", indexedRowCount)
        .add("message", message)
        .add("ready", ready)
        .toString();
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexType.java
new file mode 100755
index 000000000..21367a4ee
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndexType.java
@@ -0,0 +1,23 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.vectorsearch;

import com.databricks.sdk.support.Generated;

/**
 * There are 2 types of Vector Search indexes:
 *
 * <p>- `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and
 * incrementally updating the index as the underlying data in the Delta Table changes. -
 * `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our
 * REST and SDK APIs. With this model, the user manages index updates.
 */
@Generated
public enum VectorIndexType {
  DELTA_SYNC, // An index that automatically syncs with a source Delta Table, automatically
  // and incrementally updating the index as the underlying data in the Delta
  // Table changes.
  DIRECT_ACCESS, // An index that supports direct read and write of vectors and metadata through
  // our REST and SDK APIs. With this model, the user manages index updates.

}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java
new file mode 100755
index 000000000..afbb2b787
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java
@@ -0,0 +1,133 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.vectorsearch;

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Paginator;
import com.databricks.sdk.support.Wait;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** **Endpoint**: Represents the compute resources to host vector search indexes. */
@Generated
public class VectorSearchEndpointsAPI {
  private static final Logger LOG = LoggerFactory.getLogger(VectorSearchEndpointsAPI.class);

  // Backing service: the generated Impl in production, or a mock in tests.
  private final VectorSearchEndpointsService impl;

  /** Regular-use constructor */
  public VectorSearchEndpointsAPI(ApiClient apiClient) {
    impl = new VectorSearchEndpointsImpl(apiClient);
  }

  /** Constructor for mocks */
  public VectorSearchEndpointsAPI(VectorSearchEndpointsService mock) {
    impl = mock;
  }

  /** Waits for the endpoint to reach ONLINE, with a default 20-minute timeout and no callback. */
  public EndpointInfo waitGetEndpointVectorSearchEndpointOnline(String endpointName)
      throws TimeoutException {
    return waitGetEndpointVectorSearchEndpointOnline(endpointName, Duration.ofMinutes(20), null);
  }

  /**
   * Polls the endpoint until it reaches ONLINE, fails fast on OFFLINE, or the timeout elapses.
   *
   * @param endpointName name of the endpoint to poll
   * @param timeout total polling budget
   * @param callback optional observer invoked with each non-terminal poll result; may be null
   * @return the endpoint info once its state is ONLINE
   * @throws TimeoutException if ONLINE is not reached within {@code timeout}
   * @throws IllegalStateException if the endpoint transitions to OFFLINE
   */
  public EndpointInfo waitGetEndpointVectorSearchEndpointOnline(
      String endpointName, Duration timeout, Consumer<EndpointInfo> callback)
      throws TimeoutException {
    long deadline = System.currentTimeMillis() + timeout.toMillis();
    java.util.List<EndpointStatusState> targetStates = Arrays.asList(EndpointStatusState.ONLINE);
    java.util.List<EndpointStatusState> failureStates = Arrays.asList(EndpointStatusState.OFFLINE);
    String statusMessage = "polling...";
    int attempt = 1;
    while (System.currentTimeMillis() < deadline) {
      EndpointInfo poll = getEndpoint(new GetEndpointRequest().setEndpointName(endpointName));
      // BUG FIX: the original dereferenced poll.getEndpointStatus().getState() BEFORE its own
      // null check, so a missing status block caused an NPE and the check was dead code.
      EndpointStatusState status =
          poll.getEndpointStatus() != null ? poll.getEndpointStatus().getState() : null;
      statusMessage = String.format("current status: %s", status);
      if (poll.getEndpointStatus() != null) {
        statusMessage = poll.getEndpointStatus().getMessage();
      }
      if (targetStates.contains(status)) {
        return poll;
      }
      if (callback != null) {
        callback.accept(poll);
      }
      if (failureStates.contains(status)) {
        String msg = String.format("failed to reach ONLINE, got %s: %s", status, statusMessage);
        throw new IllegalStateException(msg);
      }

      String prefix = String.format("endpointName=%s", endpointName);
      // Linear backoff, capped at 10 seconds per attempt, with up to 1s of jitter below.
      int sleep = attempt;
      if (sleep > 10) {
        // sleep 10s max per attempt
        sleep = 10;
      }
      LOG.info("{}: ({}) {} (sleeping ~{}s)", prefix, status, statusMessage, sleep);
      try {
        Thread.sleep((long) (sleep * 1000L + Math.random() * 1000));
      } catch (InterruptedException e) {
        // Preserve the interrupt flag; polling continues until the deadline.
        Thread.currentThread().interrupt();
      }
      attempt++;
    }
    throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage));
  }

  /** Convenience overload: builds the {@link CreateEndpoint} request from the required fields. */
  public Wait<EndpointInfo, EndpointInfo> createEndpoint(String name, EndpointType endpointType) {
    return createEndpoint(new CreateEndpoint().setName(name).setEndpointType(endpointType));
  }

  /**
   * Create an endpoint.
   *
   * <p>Create a new endpoint. Returns a {@link Wait} that can block until the endpoint is ONLINE.
   */
  public Wait<EndpointInfo, EndpointInfo> createEndpoint(CreateEndpoint request) {
    EndpointInfo response = impl.createEndpoint(request);
    return new Wait<>(
        (timeout, callback) ->
            waitGetEndpointVectorSearchEndpointOnline(response.getName(), timeout, callback),
        response);
  }

  public void deleteEndpoint(String endpointName, String name) {
    deleteEndpoint(new DeleteEndpointRequest().setEndpointName(endpointName).setName(name));
  }

  /** Delete an endpoint. */
  public void deleteEndpoint(DeleteEndpointRequest request) {
    impl.deleteEndpoint(request);
  }

  public EndpointInfo getEndpoint(String endpointName) {
    return getEndpoint(new GetEndpointRequest().setEndpointName(endpointName));
  }

  /** Get an endpoint. */
  public EndpointInfo getEndpoint(GetEndpointRequest request) {
    return impl.getEndpoint(request);
  }

  /** List all endpoints. Lazily pages through results using the next-page token. */
  public Iterable<EndpointInfo> listEndpoints(ListEndpointsRequest request) {
    return new Paginator<>(
        request,
        impl::listEndpoints,
        ListEndpointResponse::getEndpoints,
        response -> {
          String token = response.getNextPageToken();
          if (token == null) {
            return null;
          }
          return request.setPageToken(token);
        });
  }

  public VectorSearchEndpointsService impl() {
    return impl;
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java
new file mode 100755
index 000000000..22719cb80
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java
@@ -0,0 +1,49 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.vectorsearch;

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
import java.util.HashMap;
import java.util.Map;

/** Package-local implementation of VectorSearchEndpoints */
@Generated
class VectorSearchEndpointsImpl implements VectorSearchEndpointsService {
  private final ApiClient apiClient;

  public VectorSearchEndpointsImpl(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  @Override
  public EndpointInfo createEndpoint(CreateEndpoint request) {
    String path = "/api/2.0/vector-search/endpoints";
    // Generic parameters restored; the extraction had stripped them to a raw Map.
    Map<String, String> headers = new HashMap<>();
    headers.put("Accept", "application/json");
    headers.put("Content-Type", "application/json");
    return apiClient.POST(path, request, EndpointInfo.class, headers);
  }

  @Override
  public void deleteEndpoint(DeleteEndpointRequest request) {
    String path = String.format("/api/2.0/vector-search/endpoints/%s", request.getEndpointName());
    // NOTE(review): no Accept header is set here, unlike the other operations — confirm this
    // matches the generator output.
    Map<String, String> headers = new HashMap<>();
    apiClient.DELETE(path, request, Void.class, headers);
  }

  @Override
  public EndpointInfo getEndpoint(GetEndpointRequest request) {
    String path = String.format("/api/2.0/vector-search/endpoints/%s", request.getEndpointName());
    Map<String, String> headers = new HashMap<>();
    headers.put("Accept", "application/json");
    return apiClient.GET(path, request, EndpointInfo.class, headers);
  }

  @Override
  public ListEndpointResponse listEndpoints(ListEndpointsRequest request) {
    String path = "/api/2.0/vector-search/endpoints";
    Map<String, String> headers = new HashMap<>();
    headers.put("Accept", "application/json");
    return apiClient.GET(path, request, ListEndpointResponse.class, headers);
  }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsService.java
new file mode 100755
index 000000000..68552a0e9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsService.java
@@ -0,0 +1,30 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.vectorsearch;

import com.databricks.sdk.support.Generated;

/**
 * **Endpoint**: Represents the compute resources to host vector search indexes.
 *
 * <p>This is the high-level interface, that contains generated methods.
 *
 * <p>Evolving: this interface is under development. Method signatures may change.
 */
@Generated
public interface VectorSearchEndpointsService {
  /**
   * Create an endpoint.
   *
   * <p>Create a new endpoint.
   */
  EndpointInfo createEndpoint(CreateEndpoint createEndpoint);

  /** Delete an endpoint. */
  void deleteEndpoint(DeleteEndpointRequest deleteEndpointRequest);

  /** Get an endpoint. */
  EndpointInfo getEndpoint(GetEndpointRequest getEndpointRequest);

  /** List all endpoints. */
  ListEndpointResponse listEndpoints(ListEndpointsRequest listEndpointsRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java
new file mode 100755
index 000000000..55d53a153
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java
@@ -0,0 +1,162 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import java.util.Collection;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * **Index**: An efficient representation of your embedding vectors that supports real-time and
+ * efficient approximate nearest neighbor (ANN) search queries.
+ *
+ *
+ * <p>There are 2 types of Vector Search indexes: * **Delta Sync Index**: An index that
+ * automatically syncs with a source Delta Table, automatically and incrementally updating the index
+ * as the underlying data in the Delta Table changes. * **Direct Vector Access Index**: An index
+ * that supports direct read and write of vectors and metadata through our REST and SDK APIs. With
+ * this model, the user manages index updates.
+ */
+@Generated
+public class VectorSearchIndexesAPI {
  private static final Logger LOG = LoggerFactory.getLogger(VectorSearchIndexesAPI.class);

  // Backing service: the generated Impl in production, or a mock in tests.
  private final VectorSearchIndexesService impl;

  /** Regular-use constructor */
  public VectorSearchIndexesAPI(ApiClient apiClient) {
    impl = new VectorSearchIndexesImpl(apiClient);
  }

  /** Constructor for mocks */
  public VectorSearchIndexesAPI(VectorSearchIndexesService mock) {
    impl = mock;
  }
+
  /** Convenience overload: builds the {@link CreateVectorIndexRequest} from required fields. */
  public CreateVectorIndexResponse createIndex(
      String name, String primaryKey, VectorIndexType indexType) {
    return createIndex(
        new CreateVectorIndexRequest()
            .setName(name)
            .setPrimaryKey(primaryKey)
            .setIndexType(indexType));
  }

  /**
   * Create an index.
   *
   * <p>Create a new index.
   */
  public CreateVectorIndexResponse createIndex(CreateVectorIndexRequest request) {
    return impl.createIndex(request);
  }
+
+ public DeleteDataVectorIndexResponse deleteDataVectorIndex(
+ String name, Collection primaryKeys) {
+ return deleteDataVectorIndex(
+ new DeleteDataVectorIndexRequest().setName(name).setPrimaryKeys(primaryKeys));
+ }
+
+ /**
+ * Delete data from index.
+ *
+ *
Handles the deletion of data from a specified vector index.
+ */
+ public DeleteDataVectorIndexResponse deleteDataVectorIndex(DeleteDataVectorIndexRequest request) {
+ return impl.deleteDataVectorIndex(request);
+ }
+
  /** Convenience overload: deletes the index identified by {@code indexName}. */
  public void deleteIndex(String indexName) {
    deleteIndex(new DeleteIndexRequest().setIndexName(indexName));
  }

  /**
   * Delete an index.
   *
   * <p>Delete an index.
   */
  public void deleteIndex(DeleteIndexRequest request) {
    impl.deleteIndex(request);
  }
+
  /** Convenience overload: fetches the index identified by {@code indexName}. */
  public VectorIndex getIndex(String indexName) {
    return getIndex(new GetIndexRequest().setIndexName(indexName));
  }

  /**
   * Get an index.
   *
   * <p>Get an index.
   */
  public VectorIndex getIndex(GetIndexRequest request) {
    return impl.getIndex(request);
  }
+
+ public Iterable listIndexes(String endpointName) {
+ return listIndexes(new ListIndexesRequest().setEndpointName(endpointName));
+ }
+
+ /**
+ * List indexes.
+ *
+ *
List all indexes in the given endpoint.
+ */
+ public Iterable listIndexes(ListIndexesRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listIndexes,
+ ListVectorIndexesResponse::getVectorIndexes,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ public QueryVectorIndexResponse queryIndex(String indexName, Collection columns) {
+ return queryIndex(new QueryVectorIndexRequest().setIndexName(indexName).setColumns(columns));
+ }
+
+ /**
+ * Query an index.
+ *
+ *
There are 2 types of Vector Search indexes: * **Delta Sync Index**: An index that
+ * automatically syncs with a source Delta Table, automatically and incrementally updating the index
+ * as the underlying data in the Delta Table changes. * **Direct Vector Access Index**: An index
+ * that supports direct read and write of vectors and metadata through our REST and SDK APIs. With
+ * this model, the user manages index updates.
+ *
+ *
This is the high-level interface, that contains generated methods.
+ *
+ *
Evolving: this interface is under development. Method signatures may change.
+ */
@Generated
public interface VectorSearchIndexesService {
  /**
   * Create an index.
   *
   * <p>Create a new index.
   */
  CreateVectorIndexResponse createIndex(CreateVectorIndexRequest createVectorIndexRequest);

  /**
   * Delete data from index.
   *
   * <p>Handles the deletion of data from a specified vector index.
   */
  DeleteDataVectorIndexResponse deleteDataVectorIndex(
      DeleteDataVectorIndexRequest deleteDataVectorIndexRequest);

  /**
   * Delete an index.
   *
   * <p>Delete an index.
   */
  void deleteIndex(DeleteIndexRequest deleteIndexRequest);

  /**
   * Get an index.
   *
   * <p>Get an index.
   */
  VectorIndex getIndex(GetIndexRequest getIndexRequest);

  /**
   * List indexes.
   *
   * <p>List all indexes in the given endpoint.
   */
  ListVectorIndexesResponse listIndexes(ListIndexesRequest listIndexesRequest);

  // NOTE(review): a queryIndex(QueryVectorIndexRequest) declaration appears to have been lost
  // from this chunk — the companion API class exposes queryIndex, and the "Query an index."
  // heading was fused onto syncIndex below. Confirm against the generator output.
  /**
   * Synchronize an index.
   *
   * <p>Triggers a synchronization process for a specified vector index.
   */
  void syncIndex(SyncIndexRequest syncIndexRequest);

  /**
   * Upsert data into an index.
   *
   * <p>Handles the upserting of data into a specified vector index.
   */
  UpsertDataVectorIndexResponse upsertDataVectorIndex(
      UpsertDataVectorIndexRequest upsertDataVectorIndexRequest);
}