Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion .github/workflows/cre-regression-system-tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,11 @@ jobs:
env:
TEST_NAME: ${{ matrix.tests.test_name }}
TEST_TIMEOUT: 30m
PARALLEL_COUNT: "10"
# parallelisation flags for tests
# fanout is necessary because otherwise multiple parallel tests would try to start chip test sinks on the same port, causing conflicts
CRE_TEST_PARALLEL_ENABLED: "true"
CRE_TEST_CHIP_SINK_FANOUT_ENABLED: "true"
run: |
echo "Starting test: '${TEST_NAME}'"
echo "⚠️⚠️⚠️ Add 'skip-e2e-regression' label to skip this step if necessary ⚠️⚠️⚠️"
Expand All @@ -192,7 +197,7 @@ jobs:
--junitfile=/tmp/junit-report-regression.xml \
--format=github-actions \
-- \
-v -run "^(${TEST_NAME})$" -timeout ${TEST_TIMEOUT} -count=1 -parallel=1 \
-v -run "^(${TEST_NAME})$" -timeout ${TEST_TIMEOUT} -count=1 -parallel=${PARALLEL_COUNT} \
github.com/smartcontractkit/chainlink/system-tests/tests/regression/cre

echo "⚠️⚠️⚠️ Add 'skip-e2e-regression' label to skip this step if necessary ⚠️⚠️⚠️"
Expand Down
7 changes: 6 additions & 1 deletion .github/workflows/cre-system-tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -299,14 +299,19 @@ jobs:
RUN_QUARANTINED_TESTS: "true" # always run quarantined tests in CI
TOPOLOGY_NAME: ${{ matrix.tests.topology }}
GITHUB_TOKEN: ${{ steps.github-token.outputs.access-token || '' }} # to avoid rate limiting when downloading protobuf files from GitHub
PARALLEL_COUNT: "10"
# parallelisation flags for tests
# fanout is necessary because otherwise multiple parallel tests would try to start chip test sinks on the same port, causing conflicts
CRE_TEST_PARALLEL_ENABLED: "true"
CRE_TEST_CHIP_SINK_FANOUT_ENABLED: "true"
run: |
echo "Starting test: '${TEST_NAME}'"
gotestsum \
--jsonfile=/tmp/gotest.log \
--junitfile=/tmp/junit-report.xml \
--format=github-actions \
-- \
-v -run "^(${TEST_NAME})$" -timeout "${TEST_TIMEOUT}" -count=1 -parallel=1 \
-v -run "^(${TEST_NAME})$" -timeout "${TEST_TIMEOUT}" -count=1 -parallel="${PARALLEL_COUNT}" \
github.com/smartcontractkit/chainlink/system-tests/tests/smoke/cre

exit_code=$?
Expand Down
19 changes: 19 additions & 0 deletions system-tests/lib/cre/environment/blockchains/evm/evm.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,25 @@ type Blockchain struct {
SethClient *seth.Client
}

// CloneWithSethClient returns a copy of the blockchain handle that shares all
// immutable chain metadata with the receiver but uses the provided Seth client.
// This lets tests operate with per-test keys while reusing the same chain info.
func (e *Blockchain) CloneWithSethClient(sc *seth.Client) *Blockchain {
	clone := &Blockchain{}
	clone.testLogger = e.testLogger
	clone.chainSelector = e.chainSelector
	clone.chainID = e.chainID
	clone.ctfOutput = e.ctfOutput
	clone.SethClient = sc
	return clone
}

// WSURL returns the external WebSocket URL of the first node in the CTF
// output, or an empty string when the output contains no nodes.
func (e *Blockchain) WSURL() string {
	nodes := e.ctfOutput.Nodes
	if len(nodes) > 0 {
		return nodes[0].ExternalWSUrl
	}
	return ""
}

// ChainSelector returns the chain selector identifying this blockchain.
func (e *Blockchain) ChainSelector() uint64 {
	return e.chainSelector
}
Expand Down
194 changes: 106 additions & 88 deletions system-tests/lib/cre/features/solana/v2/solana.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import (
"fmt"
"slices"
"strconv"
"sync"
"text/template"
"time"

Expand All @@ -15,6 +16,7 @@ import (
"github.com/pkg/errors"
"github.com/rs/zerolog"
chainselectors "github.com/smartcontractkit/chain-selectors"
"golang.org/x/sync/errgroup"
"google.golang.org/protobuf/types/known/durationpb"

capabilitiespb "github.com/smartcontractkit/chainlink-common/pkg/capabilities/pb"
Expand Down Expand Up @@ -174,106 +176,122 @@ func createJobs(
return errors.Wrapf(chErr, "failed to get Solana chain ID from selector %d", solChain.ChainSelector())
}

solChainID, err := solChain.SolClient.GetGenesisHash(ctx)
if err != nil {
return errors.Wrapf(err, "failed to get sol genesis hash")
}
version := creEnv.ContractVersions[cre_sol.ForwarderContract.String()]
creForwarderKey := datastore.NewAddressRefKey(
solChain.ChainSelector(),
cre_sol.ForwarderContract,
version,
cre_sol.DefaultForwarderQualifier,
)
creForwarderStateKey := datastore.NewAddressRefKey(
solChain.ChainSelector(),
cre_sol.ForwarderState,
version,
cre_sol.DefaultForwarderQualifier,
)
creForwarderAddress, err := creEnv.CldfEnvironment.DataStore.Addresses().Get(creForwarderKey)
if err != nil {
return errors.Wrap(err, "failed to get CRE Forwarder address")
}
creForwarderStateAddress, err := creEnv.CldfEnvironment.DataStore.Addresses().Get(creForwarderStateKey)
if err != nil {
return errors.Wrap(err, "failed to get CRE Forwarder State address")
}
tmpl, err := template.New("solConfig").Parse(configTemplate)
if err != nil {
return errors.Wrapf(err, "failed to parse %s config template", flag)
}

var specsMu sync.Mutex
group, groupCtx := errgroup.WithContext(ctx)
for _, workerNode := range workerNodes {
key, ok := workerNode.Keys.Solana[chainID]
if !ok {
return fmt.Errorf("failed to get solana key (chainID %s, node index %d)", chainID, workerNode.Index)
}
group.Go(func() error {
key, ok := workerNode.Keys.Solana[chainID]
if !ok {
return fmt.Errorf("failed to get solana key (chainID %s, node index %d)", chainID, workerNode.Index)
}

version := creEnv.ContractVersions[cre_sol.ForwarderContract.String()]
nodeAddress := key.PublicAddress.String()
runtimeFallbacks := map[string]any{
"CREForwarderAddress": creForwarderAddress.Address,
"CREForwarderState": creForwarderStateAddress.Address,
"NodeAddress": nodeAddress,
"IsLocal": true,
"Network": "solana",
"ChainID": solChainID.String(),
}

creForwarderKey := datastore.NewAddressRefKey(
solChain.ChainSelector(),
cre_sol.ForwarderContract,
version,
cre_sol.DefaultForwarderQualifier,
)
creForwarderStateKey := datastore.NewAddressRefKey(
solChain.ChainSelector(),
cre_sol.ForwarderState,
version,
cre_sol.DefaultForwarderQualifier,
)
creForwarderAddress, err := creEnv.CldfEnvironment.DataStore.Addresses().Get(creForwarderKey)
if err != nil {
return errors.Wrap(err, "failed to get CRE Forwarder address")
}
creForwarderStateAddress, err := creEnv.CldfEnvironment.DataStore.Addresses().Get(creForwarderStateKey)
if err != nil {
return errors.Wrap(err, "failed to get CRE Forwarder State address")
}
templateData, aErr := credon.ApplyRuntimeValues(config.Values, runtimeFallbacks)
if aErr != nil {
return errors.Wrap(aErr, "failed to apply runtime values")
}

nodeAddress := key.PublicAddress.String()
tmpl, err := template.New("solConfig").Parse(configTemplate)
if err != nil {
return errors.Wrapf(err, "failed to parse %s config template", flag)
}
var configBuffer bytes.Buffer
if err := tmpl.Execute(&configBuffer, templateData); err != nil {
return errors.Wrapf(err, "failed to execute %s config template", flag)
}

solChainID, err := solChain.SolClient.GetGenesisHash(ctx)
if err != nil {
return errors.Wrapf(err, "failed to get sol genesis hash")
}
runtimeFallbacks := map[string]any{
"CREForwarderAddress": creForwarderAddress.Address,
"CREForwarderState": creForwarderStateAddress.Address,
"NodeAddress": nodeAddress,
"IsLocal": true,
"Network": "solana",
"ChainID": solChainID.String(),
}
configStr := configBuffer.String()
if err := credon.ValidateTemplateSubstitution(configStr, flag); err != nil {
return errors.Wrapf(err, "%s template validation failed", flag)
}

templateData, aErr := credon.ApplyRuntimeValues(config.Values, runtimeFallbacks)
if aErr != nil {
return errors.Wrap(aErr, "failed to apply runtime values")
}
workerInput := cre_jobs.ProposeJobSpecInput{
Domain: offchain.ProductLabel,
Environment: cre.EnvironmentName,
DONName: don.Name,
JobName: "sol-v2-worker-" + chainID,
ExtraLabels: map[string]string{cre.CapabilityLabelKey: flag},
DONFilters: []offchain.TargetDONFilter{
{Key: offchain.FilterKeyDONName, Value: don.Name},
{Key: "p2p_id", Value: workerNode.Keys.PeerID()}, // required since each node requires a different config (it contains its own from address)
},
Template: job_types.Solana,
Inputs: job_types.JobSpecInput{
"command": command,
"config": configStr,
},
}

var configBuffer bytes.Buffer
if err := tmpl.Execute(&configBuffer, templateData); err != nil {
return errors.Wrapf(err, "failed to execute %s config template", flag)
}
workerVerErr := cre_jobs.ProposeJobSpec{}.VerifyPreconditions(*creEnv.CldfEnvironment, workerInput)
if workerVerErr != nil {
return fmt.Errorf("precondition verification failed for Solana v2 worker job: %w", workerVerErr)
}

configStr := configBuffer.String()
if err := credon.ValidateTemplateSubstitution(configStr, flag); err != nil {
return errors.Wrapf(err, "%s template validation failed", flag)
}
workerReport, workerErr := cre_jobs.ProposeJobSpec{}.Apply(*creEnv.CldfEnvironment, workerInput)
if workerErr != nil {
return fmt.Errorf("failed to propose Solana v2 worker job spec: %w", workerErr)
}

workerInput := cre_jobs.ProposeJobSpecInput{
Domain: offchain.ProductLabel,
Environment: cre.EnvironmentName,
DONName: don.Name,
JobName: "sol-v2-worker-" + chainID,
ExtraLabels: map[string]string{cre.CapabilityLabelKey: flag},
DONFilters: []offchain.TargetDONFilter{
{Key: offchain.FilterKeyDONName, Value: don.Name},
{Key: "p2p_id", Value: workerNode.Keys.PeerID()}, // required since each node requires a different config (it contains its own from address)
},
Template: job_types.Solana,
Inputs: job_types.JobSpecInput{
"command": command,
"config": configStr,
},
}
specsMu.Lock()
defer specsMu.Unlock()
for _, r := range workerReport.Reports {
out, ok := r.Output.(cre_jobs_ops.ProposeStandardCapabilityJobOutput)
if !ok {
return fmt.Errorf("unable to cast to ProposeStandardCapabilityJobOutput, actual type: %T", r.Output)
}
mErr := mergo.Merge(&specs, out.Specs, mergo.WithAppendSlice)
if mErr != nil {
return fmt.Errorf("failed to merge worker job specs: %w", mErr)
}
}

workerVerErr := cre_jobs.ProposeJobSpec{}.VerifyPreconditions(*creEnv.CldfEnvironment, workerInput)
if workerVerErr != nil {
return fmt.Errorf("precondition verification failed for Solana v2 worker job: %w", workerVerErr)
}
select {
case <-groupCtx.Done():
return groupCtx.Err()
default:
}

workerReport, workerErr := cre_jobs.ProposeJobSpec{}.Apply(*creEnv.CldfEnvironment, workerInput)
if workerErr != nil {
return fmt.Errorf("failed to propose Solana v2 worker job spec: %w", workerErr)
}
return nil
})
}

for _, r := range workerReport.Reports {
out, ok := r.Output.(cre_jobs_ops.ProposeStandardCapabilityJobOutput)
if !ok {
return fmt.Errorf("unable to cast to ProposeStandardCapabilityJobOutput, actual type: %T", r.Output)
}
mErr := mergo.Merge(&specs, out.Specs, mergo.WithAppendSlice)
if mErr != nil {
return fmt.Errorf("failed to merge worker job specs: %w", mErr)
}
}
if err := group.Wait(); err != nil {
return err
}

approveErr := jobs.Approve(ctx, creEnv.CldfEnvironment.Offchain, dons, specs)
Expand Down
34 changes: 25 additions & 9 deletions system-tests/lib/cre/workflow/compile.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,25 @@ const (
// It will return an error if the workflow name is less than 10 characters long.
// It will return an error if the workflow file path is not a valid file path.
func CompileWorkflow(ctx context.Context, workflowFilePath, workflowName string) (string, error) {
	// Delegate with an empty outputDir so CompileWorkflowToDir creates a
	// temporary build directory automatically.
	return CompileWorkflowToDir(ctx, workflowFilePath, workflowName, "")
}

// CompileWorkflowToDir compiles a workflow and stores build artifacts in outputDir.
// If outputDir is empty, a temporary directory is created automatically.
func CompileWorkflowToDir(ctx context.Context, workflowFilePath, workflowName, outputDir string) (string, error) {
if len(workflowName) < 10 {
return "", errors.New("workflow name must be at least 10 characters long")
}
if outputDir == "" {
var err error
outputDir, err = os.MkdirTemp("", "cre-workflow-build-*")
if err != nil {
return "", errors.Wrap(err, "failed to create temporary workflow build dir")
}
}
if mkErr := os.MkdirAll(outputDir, 0o755); mkErr != nil {
return "", errors.Wrap(mkErr, "failed to prepare workflow build dir")
}

language, lErr := delectLanguage(workflowFilePath)
if lErr != nil {
Expand All @@ -41,9 +57,9 @@ func CompileWorkflow(ctx context.Context, workflowFilePath, workflowName string)
var err error
switch language {
case LanguageGo:
workflowWasmAbsPath, err = compileGoWorkflow(ctx, workflowFilePath, workflowName)
workflowWasmAbsPath, err = compileGoWorkflow(ctx, workflowFilePath, workflowName, outputDir)
case LanguageTS:
workflowWasmAbsPath, err = compileTSWorkflow(ctx, workflowFilePath, workflowName)
workflowWasmAbsPath, err = compileTSWorkflow(ctx, workflowFilePath, workflowName, outputDir)
default:
return "", fmt.Errorf("unsupported workflow language: %s", language)
}
Expand Down Expand Up @@ -76,25 +92,25 @@ func delectLanguage(workflowFilePath string) (Language, error) {
}
}

func compileTSWorkflow(ctx context.Context, workflowFilePath, workflowName string) (string, error) {
workflowWasmPath := workflowName + ".wasm"
func compileTSWorkflow(ctx context.Context, workflowFilePath, workflowName, outputDir string) (string, error) {
workflowWasmPath := filepath.Join(outputDir, workflowName+".wasm")

compileCmd := exec.CommandContext(ctx, "bun", "cre-compile", workflowFilePath, filepath.Join(filepath.Dir(workflowFilePath), workflowWasmPath)) // #nosec G204 -- we control the value of the cmd so the lint/sec error is a false positive
compileCmd := exec.CommandContext(ctx, "bun", "cre-compile", workflowFilePath, workflowWasmPath) // #nosec G204 -- we control the value of the cmd so the lint/sec error is a false positive
if output, err := compileCmd.CombinedOutput(); err != nil {
fmt.Fprint(os.Stderr, string(output))
return "", errors.Wrap(err, "failed to compile workflow")
}

workflowWasmAbsPath, workflowWasmAbsPathErr := filepath.Abs(filepath.Join(filepath.Dir(workflowFilePath), workflowWasmPath))
workflowWasmAbsPath, workflowWasmAbsPathErr := filepath.Abs(workflowWasmPath)
if workflowWasmAbsPathErr != nil {
return "", errors.Wrap(workflowWasmAbsPathErr, "failed to get absolute path of the workflow WASM file")
}

return workflowWasmAbsPath, nil
}

func compileGoWorkflow(ctx context.Context, workflowFilePath, workflowName string) (string, error) {
workflowWasmPath := workflowName + ".wasm"
func compileGoWorkflow(ctx context.Context, workflowFilePath, workflowName, outputDir string) (string, error) {
workflowWasmPath := filepath.Join(outputDir, workflowName+".wasm")

goModTidyCmd := exec.CommandContext(ctx, "go", "mod", "tidy")
goModTidyCmd.Dir = filepath.Dir(workflowFilePath)
Expand All @@ -110,7 +126,7 @@ func compileGoWorkflow(ctx context.Context, workflowFilePath, workflowName strin
return "", errors.Wrap(err, "failed to compile workflow")
}

workflowWasmAbsPath, workflowWasmAbsPathErr := filepath.Abs(filepath.Join(filepath.Dir(workflowFilePath), workflowWasmPath))
workflowWasmAbsPath, workflowWasmAbsPathErr := filepath.Abs(workflowWasmPath)
if workflowWasmAbsPathErr != nil {
return "", errors.Wrap(workflowWasmAbsPathErr, "failed to get absolute path of the workflow WASM file")
}
Expand Down
2 changes: 1 addition & 1 deletion system-tests/lib/infra/docker.go
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ func PrintFailedContainerLogs(logger zerolog.Logger, logLinesCount uint64) {

content = strings.TrimSpace(content)
if len(content) > 0 {
logger.Info().Str("Container", cName).Msgf("Last 100 lines of logs")
logger.Info().Str("Container", cName).Msgf("Last %d lines of logs", logLinesCount)
fmt.Println(text.RedText("%s\n", content))
}
_ = ioReader.Close() // can't do much about the error here
Expand Down
Loading
Loading