-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy path application-dvd.conf
More file actions
64 lines (59 loc) · 2.22 KB
/
application-dvd.conf
File metadata and controls
64 lines (59 loc) · 2.22 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
# Feature flags controlling which pipeline stages run.
# Each key is assigned twice on purpose: the first line is the hard-coded
# default, the second (`${?ENV_VAR}`) is an optional substitution that, per
# HOCON semantics, overrides the default only when that environment variable
# is set (later assignments win; an unset `${?...}` is a no-op).
flags {
# record-count validation after generation
enableCount = false
enableCount = ${?ENABLE_COUNT}
# whether to actually generate data (off here; this run only plans)
enableGenerateData = false
enableGenerateData = ${?ENABLE_GENERATE_DATA}
# auto-generate plan and task YAML files from discovered metadata
enableGeneratePlanAndTasks = true
enableGeneratePlanAndTasks = ${?ENABLE_GENERATE_PLAN_AND_TASKS}
# track generated records (required for later deletion)
enableRecordTracking = false
enableRecordTracking = ${?ENABLE_RECORD_TRACKING}
# clean-up mode: delete previously generated (tracked) records
enableDeleteGeneratedRecords = false
enableDeleteGeneratedRecords = ${?ENABLE_DELETE_GENERATED_RECORDS}
# fail the whole run on error instead of continuing best-effort
enableFailOnError = false
enableFailOnError = ${?ENABLE_FAIL_ON_ERROR}
}
# Filesystem locations used by the run. Same default-then-optional-override
# pattern as `flags`: the `${?ENV_VAR}` line only applies when the variable
# is set in the environment.
folders {
# where auto-generated plan/task YAML is written
generatedPlanAndTaskFolderPath = "/opt/app/custom/generated"
generatedPlanAndTaskFolderPath = ${?GENERATED_PLAN_AND_TASK_FOLDER_PATH}
# plan file to execute
planFilePath = "/opt/app/custom/generated/plan/postgres-plan.yaml"
planFilePath = ${?PLAN_FILE_PATH}
# folder containing task YAML definitions
taskFolderPath = "/opt/app/custom/generated/task"
taskFolderPath = ${?TASK_FOLDER_PATH}
# where record-tracking state is persisted (used when enableRecordTracking)
recordTrackingFolderPath = "/opt/app/custom/recordTracking"
recordTrackingFolderPath = ${?RECORD_TRACKING_FOLDER_PATH}
# where generation result reports are written
# NOTE(review): env var says METADATA but the key says Results — looks like a
# naming mismatch; confirm which name deployments actually set before renaming.
generatedDataResultsFolderPath = "/opt/app/custom/report/dvd"
generatedDataResultsFolderPath = ${?GENERATED_DATA_METADATA_FOLDER_PATH}
}
# Metadata-analysis tuning. Unlike `flags`/`folders`, these have no
# environment-variable overrides — they can only be changed in this file.
metadata {
# sample size pulled from the source when profiling it
numRecordsFromDataSource = 10000
# number of sampled records actually analysed
numRecordsForAnalysis = 10000
# ratio threshold (distinct/total) below which a column is treated as a
# oneOf/categorical field — presumably; confirm against the consuming code
oneOfDistinctCountVsCountThreshold = 0.1
}
generation {}
# Spark runtime settings. `master` defaults to local mode with all cores and
# can be overridden via SPARK_MASTER (optional HOCON substitution).
spark {
master = "local[*]"
master = ${?SPARK_MASTER}
# Raw Spark conf key/value pairs passed through to the SparkSession.
config {
# cost-based optimizer + adaptive query execution
"spark.sql.cbo.enabled" = "true"
"spark.sql.adaptive.enabled" = "true"
"spark.sql.cbo.planStats.enabled" = "true"
"spark.sql.legacy.allowUntypedScalaUDF" = "true"
"spark.sql.statistics.histogram.enabled" = "true"
# small shuffle partition count suits the local/sample-sized workload
"spark.sql.shuffle.partitions" = "10"
# NOTE(review): empty catalog implementation class — referencing the
# `postgres` catalog would fail at runtime; presumably this should name a
# JDBC catalog class (cf. the cassandra entry below). Confirm before use.
"spark.sql.catalog.postgres" = ""
"spark.sql.catalog.cassandra" = "com.datastax.spark.connector.datasource.CassandraCatalog"
# S3A tuning: keep directory markers, enable the magic committer
"spark.hadoop.fs.s3a.directory.marker.retention" = "keep"
"spark.hadoop.fs.s3a.bucket.all.committer.magic.enabled" = "true"
}
}
# JDBC data-source connections. Defaults target the docker-compose Postgres
# service (`postgresdvdserver`) and can each be overridden via environment
# variables using the same optional-substitution pattern as above.
jdbc {
# DVD-rental sample database
postgresDvd {
url = "jdbc:postgresql://postgresdvdserver:5432/dvdrental"
url = ${?POSTGRES_URL}
# default credentials are for the local sample container only;
# override POSTGRES_USER / POSTGRES_PASSWORD in any real deployment
user = "postgres"
user = ${?POSTGRES_USER}
password = "postgres"
password = ${?POSTGRES_PASSWORD}
driver = "org.postgresql.Driver"
}
}