This repository has been archived by the owner on Feb 17, 2025. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy path: docker-compose.yml
143 lines (133 loc) · 3.5 KB
/
docker-compose.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
version: '3.1'

services:
  # Kafka development box (broker + Lenses UI + ZooKeeper in one container).
  kafka:
    image: lensesio/box
    container_name: kafka
    restart: always
    ports:
      # Quoted: unquoted HOST:CONTAINER mappings are parsed as base-60
      # integers by YAML 1.1 loaders for values under 60.
      - "9092:9092"  # Kafka broker
      - "3030:3030"  # Lenses web UI
      - "2181:2181"  # ZooKeeper
    environment:
      # Replace <Lenses token> with your personal download token from lenses.io.
      EULA: "https://dl.lenses.io/d/?id=<Lenses token>"
      # Replace with the host's externally reachable IP so advertised
      # listeners resolve from outside the container.
      ADV_HOST: "<Your Ip address>"
      # Env values are strings in Compose; quote number-looking values.
      SAMPLEDATA: "0"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock

  # MySQL database, initialized from SQL scripts in ./database.
  db:
    image: mysql
    container_name: database
    command: --default-authentication-plugin=mysql_native_password
    restart: always
    volumes:
      - ./database:/docker-entrypoint-initdb.d  # init scripts run on first start
      - db:/var/lib/mysql                       # named volume: persistent data
    ports:
      - "3306:3306"
    environment:
      # NOTE(review): plaintext credential committed to VCS — acceptable for a
      # local demo only; move to an env file or secret store for anything else.
      MYSQL_ROOT_PASSWORD: streamsets

  # Redis cache/store.
  redis:
    image: redis
    container_name: redis
    ports:
      - "6379:6379"

  # ###########################################
  # Spark
  # ###########################################

  # Spark standalone master.
  master:
    build:
      context: ./spark
      dockerfile: Dockerfile
    command: bin/spark-class org.apache.spark.deploy.master.Master -h master
    hostname: master
    environment:
      MASTER: spark://master:7077
      SPARK_CONF_DIR: /conf
      SPARK_PUBLIC_DNS: localhost
    expose:
      - 7001
      - 7002
      - 7003
      - 7004
      - 7005
      - 7077
      - 6066
    ports:
      - "4040:4040"  # Spark application UI
      - "6066:6066"  # REST submission endpoint
      - "7077:7077"  # master RPC
      - "8080:8080"  # master web UI
    volumes:
      - ./spark/conf/master:/conf
      - ./transformer/data:/tmp/data

  # Spark standalone worker, registered against the master above.
  worker:
    build:
      context: ./spark
      dockerfile: Dockerfile
    command: bin/spark-class org.apache.spark.deploy.worker.Worker spark://master:7077
    hostname: worker
    environment:
      SPARK_CONF_DIR: /conf
      # Quoted: Compose environment values are strings.
      SPARK_WORKER_CORES: "2"
      SPARK_WORKER_MEMORY: "1g"
      SPARK_WORKER_PORT: "8881"
      SPARK_WORKER_WEBUI_PORT: "8081"
      SPARK_PUBLIC_DNS: localhost
    # NOTE(review): `links` is legacy — the default Compose network already
    # resolves `master` by name. Kept for backward compatibility.
    links:
      - master
    expose:
      - 7012
      - 7013
      - 7014
      - 7015
      - 8881
    ports:
      - "8081:8081"  # worker web UI
    volumes:
      - ./spark/conf/worker:/conf
      - ./transformer/data:/tmp/data
      # Shared named volumes so the transformer service sees the same
      # Spark/Hadoop installations.
      - spark_home:/usr/spark
      - hadoop:/usr/hadoop-3.2.0

  # StreamSets Transformer, wired to the Spark/Hadoop installs via volumes.
  transformer:
    build:
      context: ./transformer
      dockerfile: Dockerfile
      args:
        - SDC_VERSION=latest
    environment:
      SPARK_HOME: /usr/spark
      SPARK_DIST_CLASSPATH: /usr/hadoop-3.2.0/etc/hadoop/*:/usr/hadoop-3.2.0/share/hadoop/common/lib/*:/usr/hadoop-3.2.0/share/hadoop/common/*:/usr/hadoop-3.2.0/share/hadoop/hdfs/*:/usr/hadoop-3.2.0/share/hadoop/hdfs/lib/*:/usr/hadoop-3.2.0/share/hadoop/hdfs/*:/usr/hadoop-3.2.0/share/hadoop/yarn/lib/*:/usr/hadoop-3.2.0/share/hadoop/yarn/*:/usr/hadoop-3.2.0/share/hadoop/mapreduce/lib/*:/usr/hadoop-3.2.0/share/hadoop/mapreduce/*:/usr/hadoop-3.2.0/share/hadoop/tools/lib/*
    ports:
      - "19630:19630"  # Transformer web UI
    volumes:
      - ./transformer/data:/tmp/data
      - ./transformer/sdt-data:/data/st
      - spark_home:/usr/spark
      - hadoop:/usr/hadoop-3.2.0

  # StreamSets Data Collector with the stage libraries this pipeline needs.
  sdc:
    build:
      context: ./sdc
      dockerfile: Dockerfile
      args:
        - SDC_VERSION=3.13.0
        - SDC_LIBS=streamsets-datacollector-jdbc-lib,streamsets-datacollector-redis-lib,streamsets-datacollector-groovy_2_4-lib,streamsets-datacollector-apache-kafka_2_0-lib,streamsets-datacollector-orchestrator-lib
    restart: always
    volumes:
      - ./sdc/data:/data
      - ./sdc/resources:/resources
    expose:
      - 8000
    ports:
      - "18630:18630"  # Data Collector web UI
      - "8000:8000"
  # Web dashboard served on the host's port 80.
  dashboard:
    build:
      context: ./web
      dockerfile: Dockerfile
    restart: always
    ports:
      # Quoted: 80:80 would otherwise risk YAML 1.1 implicit-typing surprises.
      - "80:80"

# Named volumes shared across services (Spark/Hadoop installs, MySQL data).
volumes:
  spark_home:
  hadoop:
  db: