Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions examples/src/main/java/io/milvus/v1/BulkWriterExample.java
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -547,8 +548,9 @@ private void callCloudImport(List<List<String>> batchFiles, String collectionNam
String secretKey = StorageConsts.cloudStorage == CloudStorage.AZURE ? StorageConsts.AZURE_ACCOUNT_KEY : StorageConsts.STORAGE_SECRET_KEY;

System.out.println("\n===================== call cloudImport ====================");
List<String> objectUrls = Lists.newArrayList(objectUrl);
CloudImportRequest bulkImportRequest = CloudImportRequest.builder()
.objectUrl(objectUrl).accessKey(accessKey).secretKey(secretKey)
.objectUrls(Lists.newArrayList(Collections.singleton(objectUrls))).accessKey(accessKey).secretKey(secretKey)
.clusterId(CloudImportConsts.CLUSTER_ID).collectionName(collectionName).partitionName(partitionName)
.apiKey(CloudImportConsts.API_KEY)
.build();
Expand Down Expand Up @@ -710,8 +712,9 @@ private Long getCollectionStatistics() {

private static void exampleCloudImport() {
System.out.println("\n===================== import files to cloud vectordb ====================");
List<String> objectUrls = Lists.newArrayList(CloudImportConsts.OBJECT_URL);
CloudImportRequest request = CloudImportRequest.builder()
.objectUrl(CloudImportConsts.OBJECT_URL).accessKey(CloudImportConsts.OBJECT_ACCESS_KEY).secretKey(CloudImportConsts.OBJECT_SECRET_KEY)
.objectUrls(Lists.newArrayList(Collections.singleton(objectUrls))).accessKey(CloudImportConsts.OBJECT_ACCESS_KEY).secretKey(CloudImportConsts.OBJECT_SECRET_KEY)
.clusterId(CloudImportConsts.CLUSTER_ID).collectionName(CloudImportConsts.COLLECTION_NAME).partitionName(CloudImportConsts.PARTITION_NAME)
.apiKey(CloudImportConsts.API_KEY)
.build();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -688,8 +688,9 @@ private static Long getCollectionRowCount() {

private static void exampleCloudImport() {
System.out.println("\n===================== import files to cloud vectordb ====================");
List<String> objectUrls = Lists.newArrayList(CloudImportConsts.OBJECT_URL);
CloudImportRequest request = CloudImportRequest.builder()
.objectUrl(CloudImportConsts.OBJECT_URL).accessKey(CloudImportConsts.OBJECT_ACCESS_KEY).secretKey(CloudImportConsts.OBJECT_SECRET_KEY)
.objectUrls(Lists.newArrayList(Collections.singleton(objectUrls))).accessKey(CloudImportConsts.OBJECT_ACCESS_KEY).secretKey(CloudImportConsts.OBJECT_SECRET_KEY)
.clusterId(CloudImportConsts.CLUSTER_ID).collectionName(CloudImportConsts.COLLECTION_NAME).partitionName(CloudImportConsts.PARTITION_NAME)
.apiKey(CloudImportConsts.API_KEY)
.build();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,18 +24,76 @@
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

import java.util.List;

@Data
@SuperBuilder
@AllArgsConstructor
@NoArgsConstructor
/*
If you want to import data into a Zilliz cloud instance and your data is stored in a storage bucket,
you can use this method to import the data from the bucket.
*/
public class CloudImportRequest extends BaseImportRequest {
private static final long serialVersionUID = 6487348610099924813L;
private String objectUrl;
private String accessKey;
private String secretKey;
private String token;
private String clusterId;

/**
* For Free & Serverless deployments: specifying this parameter is not supported.
* For Dedicated deployments: this parameter can be specified; defaults to the "default" database.
*/
private String dbName;
private String collectionName;

/**
* If the collection has partitionKey enabled:
* - The partitionName parameter cannot be specified for import.
* If the collection does not have partitionKey enabled:
* - You may specify partitionName for the import.
* - Defaults to the "default" partition if not specified.
*/
private String partitionName;

/**
* Data import can be configured in multiple ways using `objectUrls`:
* <p>
* 1. Multi-path import (multiple folders or files):
* "objectUrls": [
* ["s3://bucket-name/parquet-folder-1/1.parquet"],
* ["s3://bucket-name/parquet-folder-2/1.parquet"],
* ["s3://bucket-name/parquet-folder-3/"]
* ]
* <p>
* 2. Folder import:
* "objectUrls": [
* ["s3://bucket-name/parquet-folder/"]
* ]
* <p>
* 3. Single file import:
* "objectUrls": [
* ["s3://bucket-name/parquet-folder/1.parquet"]
* ]
*/
private List<List<String>> objectUrls;

/**
* Use `objectUrls` instead for more flexible multi-path configuration.
* <p>
* Folder import:
* "objectUrl": "s3://bucket-name/parquet-folder/"
* <p>
* File import:
* "objectUrl": "s3://bucket-name/parquet-folder/1.parquet"
*/
@Deprecated
private String objectUrl;

/** Specify `accessKey` and `secretKey`; for short-term credentials, also include `token`. */
private String accessKey;

/** Specify `accessKey` and `secretKey`; for short-term credentials, also include `token`. */
private String secretKey;

/** Specify `accessKey` and `secretKey`; for short-term credentials, also include `token`. */
private String token;
}
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,42 @@
@SuperBuilder
@AllArgsConstructor
@NoArgsConstructor
/*
If you want to import data into open-source Milvus,
you can use this method to import the data files stored in the bucket where Milvus resides.
*/
public class MilvusImportRequest extends BaseImportRequest {
private static final long serialVersionUID = -1958858397962018740L;

/**
 * Target database; this parameter can be specified and defaults to the "default" database.
 */
private String dbName;

// Name of the collection to import into (required by the import endpoint).
private String collectionName;

/**
 * If the collection has partitionKey enabled:
 * - The partitionName parameter cannot be specified for import.
 * If the collection does not have partitionKey enabled:
 * - You may specify partitionName for the import.
 * - Defaults to the "default" partition if not specified.
 */
private String partitionName;

/**
 * Paths of the data files to import, relative to the bucket where Milvus resides.
 * Data import can be configured in multiple ways using `files`:
 * <p>
 * 1. Multi-path import (multiple files):
 * "files": [
 *     ["parquet-folder-1/1.parquet"],
 *     ["parquet-folder-2/1.parquet"],
 *     ["parquet-folder-3/1.parquet"]
 * ]
 * <p>
 * 2. Single file import:
 * "files": [
 *     ["parquet-folder/1.parquet"]
 * ]
 */
private List<List<String>> files;
}
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,50 @@
@SuperBuilder
@AllArgsConstructor
@NoArgsConstructor
/*
If you want to import data into a Zilliz cloud instance and your data is stored in a Zilliz stage,
you can use this method to import the data from the stage.
*/
public class StageImportRequest extends BaseImportRequest {
private String stageName;
private List<List<String>> dataPaths;

private String clusterId;

/**
* For Free & Serverless deployments: specifying this parameter is not supported.
* For Dedicated deployments: this parameter can be specified; defaults to the "default" database.
*/
private String dbName;
private String collectionName;

/**
* If the collection has partitionKey enabled:
* - The partitionName parameter cannot be specified for import.
* If the collection does not have partitionKey enabled:
* - You may specify partitionName for the import.
* - Defaults to the "default" partition if not specified.
*/
private String partitionName;

private String stageName;

/**
* Data import can be configured in multiple ways using `dataPaths`:
* <p>
* 1. Multi-path import (multiple folders or files):
* "dataPaths": [
* ["parquet-folder-1/1.parquet"],
* ["parquet-folder-2/1.parquet"],
* ["parquet-folder-3/"]
* ]
* <p>
* 2. Folder import:
* "dataPaths": [
* ["parquet-folder/"]
* ]
* <p>
* 3. Single file import:
* "dataPaths": [
* ["parquet-folder/1.parquet"]
* ]
*/
private List<List<String>> dataPaths;
}
Loading