diff --git a/.github/workflows/ci-dgraph-integration2-tests.yml b/.github/workflows/ci-dgraph-integration2-tests.yml index 403a08465b4..3365387fe71 100644 --- a/.github/workflows/ci-dgraph-integration2-tests.yml +++ b/.github/workflows/ci-dgraph-integration2-tests.yml @@ -29,6 +29,15 @@ jobs: with: fetch-depth: 0 + - name: Restore benchmark dataset cache + uses: actions/cache/restore@v4 + with: + path: dgraphtest/datafiles + key: dataset-dgraphtest-${{ hashFiles('benchdata/benchmark-data-version') }} + + - name: Ensure datafiles directory + run: mkdir -p dgraphtest/datafiles + - name: Set up Go uses: actions/setup-go@v6 with: @@ -55,3 +64,10 @@ jobs: go clean -testcache # sleep sleep 5 + + - name: Save benchmark dataset cache + if: success() + uses: actions/cache/save@v4 + with: + path: dgraphtest/datafiles + key: dataset-dgraphtest-${{ hashFiles('benchdata/benchmark-data-version') }} diff --git a/.github/workflows/ci-dgraph-ldbc-tests.yml b/.github/workflows/ci-dgraph-ldbc-tests.yml index 045ad3d42e9..f31359ffd29 100644 --- a/.github/workflows/ci-dgraph-ldbc-tests.yml +++ b/.github/workflows/ci-dgraph-ldbc-tests.yml @@ -28,6 +28,12 @@ jobs: - name: Checkout Dgraph uses: actions/checkout@v5 + - name: Restore LDBC dataset cache + uses: actions/cache/restore@v4 + with: + path: ${{ github.workspace }}/test-data + key: dataset-ldbc-${{ hashFiles('benchdata/benchmark-data-version') }} + - name: Set up Go uses: actions/setup-go@v6 with: @@ -61,6 +67,13 @@ jobs: # move the binary cp dgraph/dgraph ~/go/bin/dgraph # run the ldbc tests - cd t; ./t --suite=ldbc + cd t; ./t --suite=ldbc --tmp=${{ github.workspace }}/test-data --keep-data # clean up docker containers after test execution ./t -r + + - name: Save LDBC dataset cache + if: success() + uses: actions/cache/save@v4 + with: + path: ${{ github.workspace }}/test-data + key: dataset-ldbc-${{ hashFiles('benchdata/benchmark-data-version') }} diff --git a/.github/workflows/ci-dgraph-load-tests.yml b/.github/workflows/ci-dgraph-load-tests.yml index a1967b59d53..73db29554d6 100644 --- a/.github/workflows/ci-dgraph-load-tests.yml +++ b/.github/workflows/ci-dgraph-load-tests.yml @@ -27,6 +27,12 @@ jobs: steps: - uses: actions/checkout@v5 + - name: Restore load test dataset cache + uses: actions/cache/restore@v4 + with: + path: ${{ github.workspace }}/test-data + key: dataset-load-${{ hashFiles('benchdata/benchmark-data-version') }} + - name: Set up Go uses: actions/setup-go@v6 with: @@ -60,8 +66,15 @@ jobs: # move the binary cp dgraph/dgraph ~/go/bin/dgraph # run the load tests - cd t; ./t --suite=load + cd t; ./t --suite=load --tmp=${{ github.workspace }}/test-data --keep-data # clean up docker containers after test execution ./t -r # sleep sleep 5 + + - name: Save load test dataset cache + if: success() + uses: actions/cache/save@v4 + with: + path: ${{ github.workspace }}/test-data + key: dataset-load-${{ hashFiles('benchdata/benchmark-data-version') }} diff --git a/benchdata/benchdata.go b/benchdata/benchdata.go new file mode 100644 index 00000000000..9c2ec42b5c3 --- /dev/null +++ b/benchdata/benchdata.go @@ -0,0 +1,454 @@ +/* + * SPDX-FileCopyrightText: © 2017-2025 Istari Digital, Inc. 
+ * SPDX-License-Identifier: Apache-2.0 + */ +package benchdata + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/hex" + "fmt" + "io" + "log" + "net/http" + "os" + "path/filepath" + "runtime" + "strings" + "time" + + "golang.org/x/sync/errgroup" +) + +const ( + benchmarkRepo = "dgraph-io/dgraph-benchmarks" + envDataRef = "DGRAPH_TEST_DATA_REF" + dataVersionFile = "benchmark-data-version" + maxRetries = 3 + concurrentLimit = 5 +) + +// TestDataFile is a type-safe identifier for benchmark data files. +// Using a named type prevents typos and lets callers reference files +// via the exported constants instead of raw strings. +type TestDataFile string + +// Load-test data files. +const ( + OneMillionNoIndexSchema TestDataFile = "1million-noindex.schema" + OneMillionSchema TestDataFile = "1million.schema" + OneMillionRDF TestDataFile = "1million.rdf.gz" + TwentyOneMillSchema TestDataFile = "21million.schema" + TwentyOneMillRDF TestDataFile = "21million.rdf.gz" +) + +// LDBC data files. +const ( + LDBCTypesSchema TestDataFile = "ldbcTypes.schema" + LDBCDeltas TestDataFile = "Deltas.rdf" + LDBCComment TestDataFile = "comment_0.rdf" + LDBCContainerOf TestDataFile = "containerOf_0.rdf" + LDBCForum TestDataFile = "forum_0.rdf" + LDBCHasCreator TestDataFile = "hasCreator_0.rdf" + LDBCHasInterest TestDataFile = "hasInterest_0.rdf" + LDBCHasMember TestDataFile = "hasMember_0.rdf" + LDBCHasModerator TestDataFile = "hasModerator_0.rdf" + LDBCHasTag TestDataFile = "hasTag_0.rdf" + LDBCHasType TestDataFile = "hasType_0.rdf" + LDBCIsLocatedIn TestDataFile = "isLocatedIn_0.rdf" + LDBCIsPartOf TestDataFile = "isPartOf_0.rdf" + LDBCIsSubclassOf TestDataFile = "isSubclassOf_0.rdf" + LDBCKnows TestDataFile = "knows_0.rdf" + LDBCLikes TestDataFile = "likes_0.rdf" + LDBCOrganisation TestDataFile = "organisation_0.rdf" + LDBCPerson TestDataFile = "person_0.rdf" + LDBCPlace TestDataFile = "place_0.rdf" + LDBCPost TestDataFile = "post_0.rdf" + LDBCReplyOf TestDataFile = "replyOf_0.rdf" + LDBCStudyAt TestDataFile = "studyAt_0.rdf" + LDBCTag TestDataFile = "tag_0.rdf" + LDBCTagclass TestDataFile = "tagclass_0.rdf" + LDBCWorkAt TestDataFile = "workAt_0.rdf" +) + +// Pre-built file groups for common test suites. +var ( + LoadTestFiles []TestDataFile + LDBCFiles []TestDataFile + AllFiles []TestDataFile +) + +// repoFilePath maps each TestDataFile to its path within the dgraph-benchmarks repo. 
+var repoFilePath = map[TestDataFile]string{ + OneMillionNoIndexSchema: "data/1million-noindex.schema", + OneMillionSchema: "data/1million.schema", + OneMillionRDF: "data/1million.rdf.gz", + TwentyOneMillSchema: "data/21million.schema", + TwentyOneMillRDF: "data/21million.rdf.gz", + LDBCTypesSchema: "ldbc/sf0.3/ldbcTypes.schema", + LDBCDeltas: "ldbc/sf0.3/ldbc_rdf_0.3/Deltas.rdf", + LDBCComment: "ldbc/sf0.3/ldbc_rdf_0.3/comment_0.rdf", + LDBCContainerOf: "ldbc/sf0.3/ldbc_rdf_0.3/containerOf_0.rdf", + LDBCForum: "ldbc/sf0.3/ldbc_rdf_0.3/forum_0.rdf", + LDBCHasCreator: "ldbc/sf0.3/ldbc_rdf_0.3/hasCreator_0.rdf", + LDBCHasInterest: "ldbc/sf0.3/ldbc_rdf_0.3/hasInterest_0.rdf", + LDBCHasMember: "ldbc/sf0.3/ldbc_rdf_0.3/hasMember_0.rdf", + LDBCHasModerator: "ldbc/sf0.3/ldbc_rdf_0.3/hasModerator_0.rdf", + LDBCHasTag: "ldbc/sf0.3/ldbc_rdf_0.3/hasTag_0.rdf", + LDBCHasType: "ldbc/sf0.3/ldbc_rdf_0.3/hasType_0.rdf", + LDBCIsLocatedIn: "ldbc/sf0.3/ldbc_rdf_0.3/isLocatedIn_0.rdf", + LDBCIsPartOf: "ldbc/sf0.3/ldbc_rdf_0.3/isPartOf_0.rdf", + LDBCIsSubclassOf: "ldbc/sf0.3/ldbc_rdf_0.3/isSubclassOf_0.rdf", + LDBCKnows: "ldbc/sf0.3/ldbc_rdf_0.3/knows_0.rdf", + LDBCLikes: "ldbc/sf0.3/ldbc_rdf_0.3/likes_0.rdf", + LDBCOrganisation: "ldbc/sf0.3/ldbc_rdf_0.3/organisation_0.rdf", + LDBCPerson: "ldbc/sf0.3/ldbc_rdf_0.3/person_0.rdf", + LDBCPlace: "ldbc/sf0.3/ldbc_rdf_0.3/place_0.rdf", + LDBCPost: "ldbc/sf0.3/ldbc_rdf_0.3/post_0.rdf", + LDBCReplyOf: "ldbc/sf0.3/ldbc_rdf_0.3/replyOf_0.rdf", + LDBCStudyAt: "ldbc/sf0.3/ldbc_rdf_0.3/studyAt_0.rdf", + LDBCTag: "ldbc/sf0.3/ldbc_rdf_0.3/tag_0.rdf", + LDBCTagclass: "ldbc/sf0.3/ldbc_rdf_0.3/tagclass_0.rdf", + LDBCWorkAt: "ldbc/sf0.3/ldbc_rdf_0.3/workAt_0.rdf", +} + +func init() { + LoadTestFiles = []TestDataFile{ + OneMillionNoIndexSchema, OneMillionSchema, OneMillionRDF, + TwentyOneMillSchema, TwentyOneMillRDF, + } + LDBCFiles = []TestDataFile{ + LDBCTypesSchema, + LDBCDeltas, LDBCComment, LDBCContainerOf, LDBCForum, + LDBCHasCreator, LDBCHasInterest, LDBCHasMember, LDBCHasModerator, + LDBCHasTag, LDBCHasType, LDBCIsLocatedIn, LDBCIsPartOf, + LDBCIsSubclassOf, LDBCKnows, LDBCLikes, LDBCOrganisation, + LDBCPerson, LDBCPlace, LDBCPost, LDBCReplyOf, + LDBCStudyAt, LDBCTag, LDBCTagclass, LDBCWorkAt, + } + AllFiles = make([]TestDataFile, 0, len(repoFilePath)) + for k := range repoFilePath { + AllFiles = append(AllFiles, k) + } +} + +// DataRef returns the git ref (branch, tag, or SHA) to use when downloading +// benchmark data. Resolution order: +// 1. refOverride argument (e.g. from a --data-ref CLI flag) +// 2. DGRAPH_TEST_DATA_REF environment variable +// 3. Contents of benchdata/benchmark-data-version file +// 4. Falls back to "main" +func DataRef(refOverride string) string { + if refOverride != "" { + return refOverride + } + if v := os.Getenv(envDataRef); v != "" { + return v + } + versionFile := filepath.Join(pkgDir(), dataVersionFile) + if data, err := os.ReadFile(versionFile); err == nil { + if ref := strings.TrimSpace(string(data)); ref != "" { + return ref + } + } + return "main" +} + +// EnsureFiles ensures the specified test data files exist in destDir, +// downloading any missing or invalid ones from dgraph-benchmarks at the given +// git ref. If no files are specified, all registered files are ensured. +// Returns the local paths of all requested files. +// +// LFS-tracked files are automatically detected via the GitHub Contents API +// and downloaded from the appropriate CDN URL. SHA256 checksums from the LFS +// pointer are verified after download. 
+func EnsureFiles(destDir, ref string, files ...TestDataFile) ([]string, error) { + if len(files) == 0 { + files = AllFiles + } + if err := os.MkdirAll(destDir, 0o755); err != nil { + return nil, fmt.Errorf("creating dest dir: %w", err) + } + + paths := make([]string, len(files)) + for i, f := range files { + paths[i] = filepath.Join(destDir, string(f)) + } + + g, _ := errgroup.WithContext(context.Background()) + g.SetLimit(concurrentLimit) + + for i, f := range files { + dest := paths[i] + if fileExistsAndValid(dest) { + log.Printf("Skipping %s (already exists)", f) + continue + } + g.Go(func() error { + start := time.Now() + if err := resolveAndDownload(f, ref, dest); err != nil { + return fmt.Errorf("downloading %s: %w", f, err) + } + log.Printf("Downloaded %s in %s", f, time.Since(start)) + return nil + }) + } + + if err := g.Wait(); err != nil { + return nil, err + } + return paths, nil +} + +type lfsPointerInfo struct { + OID string + Size int64 +} + +// detectLFS checks whether a file in the benchmark repo is LFS-tracked by +// fetching the raw git blob via the GitHub Contents API. For LFS-tracked files +// the API returns the pointer text (~130 bytes) regardless of the actual file +// size, so this works reliably for files of any size. +func detectLFS(repoPath, ref string) (*lfsPointerInfo, error) { + token := os.Getenv("GITHUB_TOKEN") + content, err := fetchRawBlob(repoPath, ref, token) + if err != nil { + return nil, err + } + return parseLFSPointer(content), nil +} + +func fetchRawBlob(path, ref, token string) ([]byte, error) { + url := fmt.Sprintf("https://api.github.com/repos/%s/contents/%s?ref=%s", + benchmarkRepo, path, ref) + + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return nil, err + } + req.Header.Set("Accept", "application/vnd.github.v3.raw") + if token != "" { + req.Header.Set("Authorization", "Bearer "+token) + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(resp.Body) + return nil, fmt.Errorf("GitHub Contents API raw %d: %s", resp.StatusCode, body) + } + + // LFS pointers are ~130 bytes. Cap the read to avoid buffering large + // non-LFS file content just for the LFS check. + const maxPointerSize = 1024 + buf := make([]byte, maxPointerSize) + n, err := io.ReadFull(resp.Body, buf) + if err == io.ErrUnexpectedEOF { + // File is smaller than 1KB -- that's fine, use what we got. + return buf[:n], nil + } + if err != nil { + return nil, err + } + // Read exactly 1KB. If there's more data, it's not an LFS pointer. + return buf[:n], nil +} + +func parseLFSPointer(content []byte) *lfsPointerInfo { + s := string(content) + if !strings.HasPrefix(s, "version https://git-lfs.github.com/spec/v1") { + return nil + } + info := &lfsPointerInfo{} + for _, line := range strings.Split(s, "\n") { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "oid sha256:") { + info.OID = strings.TrimPrefix(line, "oid sha256:") + } + if strings.HasPrefix(line, "size ") { + if _, err := fmt.Sscanf(line, "size %d", &info.Size); err != nil { + return nil + } + } + } + if info.OID == "" { + return nil + } + return info +} + +// resolveAndDownload determines the correct download URL for a file (auto- +// detecting LFS), downloads it with retry, and validates the result. 
+func resolveAndDownload(file TestDataFile, ref, destPath string) error { + path, ok := repoFilePath[file] + if !ok { + return fmt.Errorf("unknown test data file: %s", file) + } + + lfsInfo, err := detectLFS(path, ref) + if err != nil { + // GitHub API unavailable (rate limit, network). Try media URL first + // (handles LFS), fall back to raw URL (handles non-LFS). + log.Printf("warning: LFS detection failed for %s, trying both CDN URLs: %v", file, err) + mediaURL := fmt.Sprintf("https://media.githubusercontent.com/media/%s/%s/%s", + benchmarkRepo, ref, path) + rawURL := fmt.Sprintf("https://raw.githubusercontent.com/%s/%s/%s", + benchmarkRepo, ref, path) + if dlErr := downloadWithRetry(mediaURL, destPath); dlErr == nil { + if valErr := validateFile(destPath); valErr == nil { + return nil + } + } + if dlErr := downloadWithRetry(rawURL, destPath); dlErr != nil { + return dlErr + } + return validateFile(destPath) + } + + if lfsInfo != nil { + url := fmt.Sprintf("https://media.githubusercontent.com/media/%s/%s/%s", + benchmarkRepo, ref, path) + if err := downloadWithRetry(url, destPath); err != nil { + return err + } + if err := verifyChecksum(destPath, lfsInfo); err != nil { + _ = os.Remove(destPath) + return fmt.Errorf("integrity check failed: %w", err) + } + log.Printf("verified %s: SHA256=%s size=%d", file, lfsInfo.OID, lfsInfo.Size) + } else { + url := fmt.Sprintf("https://raw.githubusercontent.com/%s/%s/%s", + benchmarkRepo, ref, path) + if err := downloadWithRetry(url, destPath); err != nil { + return err + } + } + + return validateFile(destPath) +} + +// downloadWithRetry downloads a URL to destPath with Go-level retry +// (3 attempts, exponential backoff). On final failure the partial file +// is removed. +func downloadWithRetry(url, destPath string) error { + token := os.Getenv("GITHUB_TOKEN") + for attempt := 1; attempt <= maxRetries; attempt++ { + if err := downloadToFile(url, destPath, token); err != nil { + log.Printf("attempt %d/%d failed to download %s: %v", + attempt, maxRetries, filepath.Base(destPath), err) + _ = os.Remove(destPath) + if attempt < maxRetries { + time.Sleep(time.Duration(attempt*5) * time.Second) + continue + } + return fmt.Errorf("failed after %d attempts: %w", maxRetries, err) + } + return nil + } + return nil +} + +// downloadToFile streams a URL to a local file. 
+func downloadToFile(url, destPath, token string) error { + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return fmt.Errorf("creating request: %w", err) + } + if token != "" && strings.Contains(url, "githubusercontent.com") { + req.Header.Set("Authorization", "Bearer "+token) + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("HTTP %d from %s", resp.StatusCode, url) + } + + f, err := os.Create(destPath) + if err != nil { + return err + } + defer f.Close() + + if _, err := io.Copy(f, resp.Body); err != nil { + return err + } + return nil +} + +var ( + lfsPointerPrefix = []byte("version https://git-lfs.github.com") + gzipMagic = []byte{0x1f, 0x8b} +) + +func fileExistsAndValid(fpath string) bool { + fi, err := os.Stat(fpath) + if err != nil || fi.IsDir() || fi.Size() == 0 { + return false + } + return validateFile(fpath) == nil +} + +func validateFile(fpath string) error { + f, err := os.Open(fpath) + if err != nil { + return err + } + defer f.Close() + + header := make([]byte, 64) + n, err := f.Read(header) + if err != nil { + return fmt.Errorf("reading file header: %w", err) + } + header = header[:n] + + if bytes.HasPrefix(header, lfsPointerPrefix) { + return fmt.Errorf("file is a Git LFS pointer, not actual content") + } + if strings.HasSuffix(fpath, ".gz") && (n < 2 || !bytes.HasPrefix(header, gzipMagic)) { + return fmt.Errorf("file does not have valid gzip header") + } + return nil +} + +func verifyChecksum(fpath string, info *lfsPointerInfo) error { + fi, err := os.Stat(fpath) + if err != nil { + return err + } + if fi.Size() != info.Size { + return fmt.Errorf("size mismatch: local %d != expected %d", fi.Size(), info.Size) + } + + f, err := os.Open(fpath) + if err != nil { + return err + } + defer f.Close() + + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return fmt.Errorf("computing SHA256: %w", err) + } + actual := hex.EncodeToString(h.Sum(nil)) + if actual != info.OID { + return fmt.Errorf("SHA256 mismatch: local %s != expected %s", actual, info.OID) + } + return nil +} + +func pkgDir() string { + _, thisFile, _, _ := runtime.Caller(0) + return filepath.Dir(thisFile) +} diff --git a/benchdata/benchmark-data-version b/benchdata/benchmark-data-version new file mode 100644 index 00000000000..ba2906d0666 --- /dev/null +++ b/benchdata/benchmark-data-version @@ -0,0 +1 @@ +main diff --git a/dgraphtest/load.go b/dgraphtest/load.go index 116c04a6b5c..35f252737ed 100644 --- a/dgraphtest/load.go +++ b/dgraphtest/load.go @@ -24,6 +24,7 @@ import ( "github.com/pkg/errors" "github.com/dgraph-io/dgo/v250/protos/api" + "github.com/dgraph-io/dgraph/v25/benchdata" "github.com/dgraph-io/dgraph/v25/dgraphapi" "github.com/dgraph-io/dgraph/v25/enc" "github.com/dgraph-io/dgraph/v25/x" @@ -40,13 +41,6 @@ func (c *LocalCluster) HostDgraphBinaryPath() string { return filepath.Join(c.tempBinDir, "dgraph_host") } -var datafiles = map[string]string{ - "1million.schema": "https://github.com/dgraph-io/dgraph-benchmarks/blob/main/data/1million.schema?raw=true", - "1million.rdf.gz": "https://github.com/dgraph-io/dgraph-benchmarks/blob/main/data/1million.rdf.gz?raw=true", - "21million.schema": "https://github.com/dgraph-io/dgraph-benchmarks/blob/main/data/21million.schema?raw=true", - "21million.rdf.gz": "https://github.com/dgraph-io/dgraph-benchmarks/blob/main/data/21million.rdf.gz?raw=true", -} - type DatasetType int type Dataset struct { name string @@ 
-590,25 +584,9 @@ func (d *Dataset) GqlSchemaPath() string { } func (d *Dataset) ensureFile(filename string) string { - fullPath := filepath.Join(datasetFilesPath, filename) - if exists, _ := fileExists(fullPath); !exists { - url, ok := datafiles[filename] - if !ok { - panic(fmt.Sprintf("dataset file %s not found in datafiles map", filename)) - } - if err := downloadFile(filename, url); err != nil { - panic(fmt.Sprintf("failed to download %s: %v", filename, err)) - } - } - return fullPath -} - -func downloadFile(fname, url string) error { - cmd := exec.Command("wget", "-O", fname, url) - cmd.Dir = datasetFilesPath - - if _, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("error downloading file %s: %w", fname, err) + paths, err := benchdata.EnsureFiles(datasetFilesPath, benchdata.DataRef(""), benchdata.TestDataFile(filename)) + if err != nil { + panic(fmt.Sprintf("failed to ensure %s: %v", filename, err)) } - return nil + return paths[0] } diff --git a/t/t.go b/t/t.go index 168b6d364c7..831031c0df2 100644 --- a/t/t.go +++ b/t/t.go @@ -35,6 +35,7 @@ import ( "github.com/spf13/pflag" "golang.org/x/tools/go/packages" + "github.com/dgraph-io/dgraph/v25/benchdata" "github.com/dgraph-io/dgraph/v25/testutil" "github.com/dgraph-io/dgraph/v25/x" "github.com/dgraph-io/ristretto/v2/z" @@ -93,7 +94,13 @@ var ( "unit = true unit tests only (no Docker, no integration tag). "+ "integration = everything except ldbc, load, and systest-heavy (with Docker). "+ "systest = systest-baseline + systest-heavy.") - tmp = pflag.String("tmp", "", "Temporary directory used to download data.") + tmp = pflag.String("tmp", "", "Temporary directory used to download data.") + keepData = pflag.Bool("keep-data", false, + "If true, do not remove the data directory after tests complete. "+ + "Useful in CI where data is cached between runs.") + dataRef = pflag.String("data-ref", "", + "Git ref (branch, tag, or SHA) for dgraph-benchmarks data. 
"+ + "Overrides DGRAPH_TEST_DATA_REF env var and benchmark-data-version file.") downloadResources = pflag.BoolP("download", "d", true, "Flag to specify whether to download resources or not") race = pflag.Bool("race", false, "Set true to build with race") @@ -1121,91 +1128,26 @@ func isHeavyPackage(pkg string) bool { return false } -var datafiles = map[string]string{ - "1million-noindex.schema": "https://raw.githubusercontent.com/dgraph-io/dgraph-benchmarks/refs/heads/main/data/1million-noindex.schema", - "1million.schema": "https://raw.githubusercontent.com/dgraph-io/dgraph-benchmarks/refs/heads/main/data/1million.schema", - "1million.rdf.gz": "https://media.githubusercontent.com/media/dgraph-io/dgraph-benchmarks/refs/heads/main/data/1million.rdf.gz", - "21million.schema": "https://raw.githubusercontent.com/dgraph-io/dgraph-benchmarks/refs/heads/main/data/21million.schema", - "21million.rdf.gz": "https://media.githubusercontent.com/media/dgraph-io/dgraph-benchmarks/refs/heads/main/data/21million.rdf.gz", -} - -var baseUrl = "https://media.githubusercontent.com/media/dgraph-io/dgraph-benchmarks/refs/heads/main/ldbc/sf0.3/ldbc_rdf_0.3/" -var suffix = "?raw=true" - -var rdfFileNames = [...]string{ - "Deltas.rdf", - "comment_0.rdf", - "containerOf_0.rdf", - "forum_0.rdf", - "hasCreator_0.rdf", - "hasInterest_0.rdf", - "hasMember_0.rdf", - "hasModerator_0.rdf", - "hasTag_0.rdf", - "hasType_0.rdf", - "isLocatedIn_0.rdf", - "isPartOf_0.rdf", - "isSubclassOf_0.rdf", - "knows_0.rdf", - "likes_0.rdf", - "organisation_0.rdf", - "person_0.rdf", - "place_0.rdf", - "post_0.rdf", - "replyOf_0.rdf", - "studyAt_0.rdf", - "tag_0.rdf", - "tagclass_0.rdf", - "workAt_0.rdf"} - -var ldbcDataFiles = map[string]string{ - "ldbcTypes.schema": "https://github.com/dgraph-io/dgraph-benchmarks/blob/main/ldbc/sf0.3/ldbcTypes.schema?raw=true", -} - -func downloadDataFiles() { +func downloadDataFiles() error { if !*downloadResources { fmt.Print("Skipping downloading of resources\n") - return - } - for fname, link := range datafiles { - cmd := exec.Command("wget", "-O", fname, link) - cmd.Dir = *tmp - - if out, err := cmd.CombinedOutput(); err != nil { - fmt.Printf("Error %v\n", err) - panic(fmt.Sprintf("error downloading a file: %s", string(out))) - } + return nil } + ref := benchdata.DataRef(*dataRef) + fmt.Printf("Using benchmark data ref: %s\n", ref) + _, err := benchdata.EnsureFiles(*tmp, ref, benchdata.LoadTestFiles...) + return err } -func downloadLDBCFiles(dir string) { +func downloadLDBCFiles(dir string) error { if !*downloadResources { fmt.Print("Skipping downloading of resources\n") - return - } - - for _, name := range rdfFileNames { - ldbcDataFiles[name] = baseUrl + name + suffix - } - - start := time.Now() - var wg sync.WaitGroup - for fname, link := range ldbcDataFiles { - wg.Add(1) - go func(fname, link string, wg *sync.WaitGroup) { - defer wg.Done() - start := time.Now() - cmd := exec.Command("wget", "-O", fname, link) - cmd.Dir = dir - if out, err := cmd.CombinedOutput(); err != nil { - fmt.Printf("Error %v\n", err) - panic(fmt.Sprintf("error downloading a file: %s", string(out))) - } - fmt.Printf("Downloaded %s to %s in %s \n", fname, dir, time.Since(start)) - }(fname, link, &wg) + return nil } - wg.Wait() - fmt.Printf("Downloaded %d files in %s \n", len(ldbcDataFiles), time.Since(start)) + ref := benchdata.DataRef(*dataRef) + fmt.Printf("Using benchmark data ref: %s\n", ref) + _, err := benchdata.EnsureFiles(dir, ref, benchdata.LDBCFiles...) 
+ return err } func createTestCoverageFile(path string) error { @@ -1387,10 +1329,15 @@ func run() error { needsData := testSuiteContainsAny("load", "ldbc", "all") if needsData && *tmp == "" { *tmp = filepath.Join(os.TempDir(), "dgraph-test-data") - x.Check(testutil.MakeDirEmpty([]string{*tmp})) + } + if needsData { + x.Check(os.MkdirAll(*tmp, 0755)) } if testSuiteContainsAny("load", "all") { - downloadDataFiles() + if err := downloadDataFiles(); err != nil { + fmt.Printf("Failed to download data files: %v\n", err) + return err + } } if testSuiteContainsAny("ldbc", "all") { // LDBC files go into a subdirectory because the LDBC test bulk-loads @@ -1398,7 +1345,10 @@ func run() error { // with LDBC data causes schema mismatches. ldbcDir := filepath.Join(*tmp, "ldbc") x.Check(os.MkdirAll(ldbcDir, 0755)) - downloadLDBCFiles(ldbcDir) + if err := downloadLDBCFiles(ldbcDir); err != nil { + fmt.Printf("Failed to download LDBC files: %v\n", err) + return err + } } for i, task := range valid { select { @@ -1449,7 +1399,9 @@ func main() { procId = rand.Intn(1000) err := run() - _ = os.RemoveAll(*tmp) + if !*keepData && *tmp != "" { + _ = os.RemoveAll(*tmp) + } if err != nil { os.Exit(1) }
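Reviewer note: a minimal sketch of how the new benchdata API is meant to be called, mirroring what t/t.go and dgraphtest/load.go now do in this diff. The destination directory and the specific files requested here are illustrative only; the function names, constants, and resolution order come from benchdata/benchdata.go above.

package main

import (
	"log"

	"github.com/dgraph-io/dgraph/v25/benchdata"
)

func main() {
	// Resolve the data ref the same way the test runner does: explicit override,
	// then DGRAPH_TEST_DATA_REF, then benchdata/benchmark-data-version, then "main".
	ref := benchdata.DataRef("")

	// Download only files that are missing or fail validation. LFS-tracked files
	// are fetched from the media CDN and verified against the pointer's SHA256.
	// "/tmp/dgraph-test-data" is an arbitrary example directory, not part of the change.
	paths, err := benchdata.EnsureFiles("/tmp/dgraph-test-data", ref,
		benchdata.OneMillionSchema, benchdata.OneMillionRDF)
	if err != nil {
		log.Fatalf("ensuring benchmark data: %v", err)
	}
	log.Printf("benchmark data ready: %v", paths)
}

Pinning the benchmark data to a specific dgraph-benchmarks commit is then a matter of writing that SHA into benchdata/benchmark-data-version (or exporting DGRAPH_TEST_DATA_REF, or passing --data-ref to ./t); because the workflow cache keys hash that file, changing it also rotates the dataset-* caches added above.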