Skip to content

Commit

Permalink
feat(mysql): 集群标准化 close #933
Browse files Browse the repository at this point in the history
  • Loading branch information
xfwduke committed Sep 1, 2023
1 parent a9c6fb9 commit f986f9f
Show file tree
Hide file tree
Showing 10 changed files with 373 additions and 21 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,10 @@ func (c *DeployMySQLCrondComp) Start() (err error) {
*/
errChan := make(chan error)
go func() {
// 重装的时候无脑尝试关闭一次
_, _ = http.Get("http://127.0.0.1:9999/quit")
time.Sleep(15 * time.Second)

cmd := exec.Command(
"su", "-", "mysql", "-c",
fmt.Sprintf(
Expand All @@ -214,7 +218,7 @@ func (c *DeployMySQLCrondComp) Start() (err error) {

started := false
LabelSelectLoop:
for i := 1; i <= 10; i++ {
for i := 1; i <= 30; i++ {
select {
case err := <-errChan:
if err != nil {
Expand All @@ -241,22 +245,8 @@ LabelSelectLoop:
}

// 关闭前台启动的 mysql-crond
resp, err := http.Get("http://127.0.0.1:9999/quit")
if err != nil {
logger.Error("call quit failed: %s", err.Error())
return err
}
defer func() {
_ = resp.Body.Close()
}()

if resp.StatusCode != 200 {
err := errors.Errorf("quit api err: %s", err.Error())
logger.Error(err.Error())
return err
}
_, _ = http.Get("http://127.0.0.1:9999/quit")

// quit 是异步生效的, 实际等待 15s 确保进程完全退出
time.Sleep(15 * time.Second)

// 确认监听端口已经关闭
Expand Down
2 changes: 0 additions & 2 deletions dbm-services/mysql/db-tools/dbactuator/pkg/core/cst/mysql.go
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,6 @@ const (
BinLogFileMatch = `(.*)/binlog\d*.bin`
// ReBinlogFilename binlog 文件名
ReBinlogFilename = `binlog\d*\.\d+$`
// DatadirMatch 实例数据目录模式
DatadirMatch = `(.*)/mysqldata/\d+$`
// MysqlOsUserName 系统帐号
MysqlOsUserName = "mysql"
// MysqlOsUserGroup 系统组
Expand Down
5 changes: 2 additions & 3 deletions dbm-services/mysql/db-tools/mysql-crond/cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ import (
"os"
"path"
"sync"
"time"

"dbm-services/mysql/db-tools/mysql-crond/pkg/config"
"dbm-services/mysql/db-tools/mysql-crond/pkg/crond"
Expand Down Expand Up @@ -62,8 +61,8 @@ var rootCmd = &cobra.Command{

go func() {
<-quit
time.Sleep(10 * time.Second)
crond.Stop()
//time.Sleep(10 * time.Second)
//crond.Stop()
slog.Info("quit mysql-crond")
os.Exit(0)
}()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,11 +53,13 @@ def get_mysql_surrounding_apps_package():
checksum_pkg = Package.get_latest_package(version=MediumEnum.Latest, pkg_type=MediumEnum.MySQLChecksum)
rotate_binlog = Package.get_latest_package(version=MediumEnum.Latest, pkg_type=MediumEnum.MySQLRotateBinlog)
mysql_monitor_pkg = Package.get_latest_package(version=MediumEnum.Latest, pkg_type=MediumEnum.MySQLMonitor)
mysql_crond_pkg = Package.get_latest_package(version=MediumEnum.Latest, pkg_type=MediumEnum.MySQLCrond)
return [
f"{env.BKREPO_PROJECT}/{env.BKREPO_BUCKET}/{db_backup_pkg.path}",
f"{env.BKREPO_PROJECT}/{env.BKREPO_BUCKET}/{checksum_pkg.path}",
f"{env.BKREPO_PROJECT}/{env.BKREPO_BUCKET}/{rotate_binlog.path}",
f"{env.BKREPO_PROJECT}/{env.BKREPO_BUCKET}/{mysql_monitor_pkg.path}",
f"{env.BKREPO_PROJECT}/{env.BKREPO_BUCKET}/{mysql_crond_pkg.path}",
]

def mysql_install_package(self, db_version: str) -> list:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,281 @@
# -*- coding: utf-8 -*-
"""
TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-DB管理系统(BlueKing-BK-DBM) available.
Copyright (C) 2017-2023 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at https://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""

import copy
import logging
from collections import defaultdict
from dataclasses import asdict
from typing import Dict, List, Optional

from django.utils.translation import ugettext as _

from backend.configuration.constants import DBType
from backend.db_meta.models import Cluster
from backend.flow.engine.bamboo.scene.common.builder import Builder, SubBuilder, SubProcess
from backend.flow.engine.bamboo.scene.common.get_file_list import GetFileList
from backend.flow.plugins.components.collections.mysql.cluster_standardize_trans_module import (
ClusterStandardizeTransModuleComponent,
)
from backend.flow.plugins.components.collections.mysql.exec_actuator_script import ExecuteDBActuatorScriptComponent
from backend.flow.plugins.components.collections.mysql.trans_flies import TransFileComponent
from backend.flow.utils.mysql.mysql_act_dataclass import DownloadMediaKwargs, ExecActuatorKwargs
from backend.flow.utils.mysql.mysql_act_playload import MysqlActPayload

logger = logging.getLogger("flow")


class MySQLHAStandardizeFlow(object):
    """
    Standardize TenDBHA clusters.

    Runs three parallel sub-flows over the given clusters:
      1. re-sync each cluster's CC module,
      2. deploy the access-layer (proxy) DBA tools (mysql-crond, monitor),
      3. deploy the storage-layer DBA tools (mysql-crond, monitor, backup,
         rotate binlog, checksum, toolkit).
    """

    def __init__(self, root_id: str, data: Optional[Dict]):
        """
        :param root_id: root id of the bamboo flow
        :param data: ticket data; see standardize() for the expected shape
        """
        self.root_id = root_id
        self.data = data

    def standardize(self):
        """
        Build and run the standardization pipeline.

        self.data = {
            "uid": "20230830",
            "created_by": "xxx",
            "bk_biz_id": "11",
            "ticket_type": "MYSQL_HA_STANDARDIZE",
            "infos": {
                "cluster_ids": [1, 2, 3],
            }
        }

        :raises ValueError: if any requested cluster id does not exist
        """
        cluster_ids = self.data["infos"]["cluster_ids"]
        cluster_objects = Cluster.objects.filter(pk__in=cluster_ids)
        if len(cluster_objects) != len(cluster_ids):
            # Fail fast instead of silently standardizing a partial set
            # (this branch was previously an unimplemented ToDo).
            missing_ids = set(cluster_ids) - {ele.id for ele in cluster_objects}
            raise ValueError(_("集群 {} 不存在".format(missing_ids)))

        standardize_pipe = Builder(root_id=self.root_id, data=self.data)
        standardize_pipe.add_parallel_sub_pipeline(
            sub_flow_list=[
                self._build_trans_module_sub(clusters=cluster_objects),
                self._build_proxy_sub(clusters=cluster_objects),
                self._build_storage_sub(clusters=cluster_objects),
            ]
        )
        logger.info(_("构建TenDBHA集群标准化流程成功"))
        standardize_pipe.run_pipeline()

    def _build_trans_module_sub(self, clusters: List[Cluster]) -> SubProcess:
        """Build one parallel sub-flow per cluster that re-syncs its CC module."""
        pipes = []
        for cluster in clusters:
            cluster_pipe = SubBuilder(
                root_id=self.root_id, data={**copy.deepcopy(self.data), "cluster_id": cluster.id}
            )
            cluster_pipe.add_act(
                # the act previously had an empty display name; give it a readable label
                act_name=_("CC模块标准化"),
                act_component_code=ClusterStandardizeTransModuleComponent.code,
                kwargs={},
            )

            pipes.append(cluster_pipe.build_sub_process(sub_name=_("{} CC 模块标准化".format(cluster.immute_domain))))

        p = SubBuilder(root_id=self.root_id, data=self.data)
        p.add_parallel_sub_pipeline(sub_flow_list=pipes)
        return p.build_sub_process(sub_name=_("CC标准化"))

    @staticmethod
    def _group_clusters_by_ip(clusters: List[Cluster], instance_set_name: str) -> Dict[str, List[Cluster]]:
        """
        Map machine ip -> clusters that have an instance of the given kind on it.

        :param clusters: clusters to group
        :param instance_set_name: name of the related set on Cluster, e.g.
            "proxyinstance_set" or "storageinstance_set"
        """
        ip_cluster_map = defaultdict(list)
        for cluster in clusters:
            # 集群的 N 个接入层实例肯定在不同的 N 台机器上
            # 一个实例肯定只是一个集群的接入层
            # 所以这个列表不会有重复值
            for ins in getattr(cluster, instance_set_name).all():
                ip_cluster_map[ins.machine.ip].append(cluster)
        return ip_cluster_map

    def _add_media_acts(self, pipe: SubBuilder, ip: str, bk_cloud_id: int):
        """Add the two file-transfer acts that push the surrounding-apps and actuator packages to ip."""
        pipe.add_act(
            act_name=_("下发MySQL周边程序介质"),
            act_component_code=TransFileComponent.code,
            kwargs=asdict(
                DownloadMediaKwargs(
                    bk_cloud_id=bk_cloud_id,
                    exec_ip=ip,
                    file_list=GetFileList(db_type=DBType.MySQL).get_mysql_surrounding_apps_package(),
                )
            ),
        )

        pipe.add_act(
            act_name=_("下发actuator介质"),
            act_component_code=TransFileComponent.code,
            kwargs=asdict(
                DownloadMediaKwargs(
                    bk_cloud_id=bk_cloud_id,
                    exec_ip=ip,
                    file_list=GetFileList(db_type=DBType.MySQL).get_db_actuator_package(),
                )
            ),
        )

    def _add_actuator_acts(self, pipe: SubBuilder, ip: str, bk_cloud_id: int, cluster_type: str, acts: List):
        """
        Add one actuator act per (act_name, payload_func_name) pair, in order.

        :param acts: list of (display name, MysqlActPayload method name) tuples
        """
        for act_name, payload_func in acts:
            pipe.add_act(
                act_name=act_name,
                act_component_code=ExecuteDBActuatorScriptComponent.code,
                kwargs=asdict(
                    ExecActuatorKwargs(
                        exec_ip=ip,
                        bk_cloud_id=bk_cloud_id,
                        get_mysql_payload_func=payload_func,
                        cluster_type=cluster_type,
                    )
                ),
            )

    def _build_proxy_sub(self, clusters: List[Cluster]) -> SubProcess:
        """Standardize the access layer: deploy mysql-crond and monitor on every proxy machine."""
        ip_cluster_map = self._group_clusters_by_ip(clusters, "proxyinstance_set")

        pipes = []
        for ip, relate_clusters in ip_cluster_map.items():
            # all clusters sharing a machine live in the same cloud area / cluster type
            bk_cloud_id = relate_clusters[0].bk_cloud_id
            cluster_type = relate_clusters[0].cluster_type
            pipe = SubBuilder(root_id=self.root_id, data=self.data)

            self._add_media_acts(pipe=pipe, ip=ip, bk_cloud_id=bk_cloud_id)
            self._add_actuator_acts(
                pipe=pipe,
                ip=ip,
                bk_cloud_id=bk_cloud_id,
                cluster_type=cluster_type,
                acts=[
                    (_("部署mysql-crond"), MysqlActPayload.get_deploy_mysql_crond_payload.__name__),
                    (_("部署监控程序"), MysqlActPayload.get_deploy_mysql_monitor_payload.__name__),
                ],
            )

            pipes.append(
                pipe.build_sub_process(
                    sub_name=_("{} 部署dba工具".format("\n".join([ele.immute_domain for ele in relate_clusters])))
                )
            )

        p = SubBuilder(root_id=self.root_id, data=self.data)
        p.add_parallel_sub_pipeline(sub_flow_list=pipes)

        return p.build_sub_process(sub_name=_("接入层标准化"))

    def _build_storage_sub(self, clusters: List[Cluster]) -> SubProcess:
        """Standardize the storage layer: deploy the full DBA tool suite on every storage machine."""
        ip_cluster_map = self._group_clusters_by_ip(clusters, "storageinstance_set")

        pipes = []
        for ip, relate_clusters in ip_cluster_map.items():
            bk_cloud_id = relate_clusters[0].bk_cloud_id
            cluster_type = relate_clusters[0].cluster_type
            pipe = SubBuilder(root_id=self.root_id, data=self.data)

            self._add_media_acts(pipe=pipe, ip=ip, bk_cloud_id=bk_cloud_id)
            self._add_actuator_acts(
                pipe=pipe,
                ip=ip,
                bk_cloud_id=bk_cloud_id,
                cluster_type=cluster_type,
                acts=[
                    (_("部署mysql-crond"), MysqlActPayload.get_deploy_mysql_crond_payload.__name__),
                    (_("部署监控程序"), MysqlActPayload.get_deploy_mysql_monitor_payload.__name__),
                    (_("部署备份程序"), MysqlActPayload.get_install_db_backup_payload.__name__),
                    (_("部署rotate binlog"), MysqlActPayload.get_install_mysql_rotatebinlog_payload.__name__),
                    (_("部署数据校验程序"), MysqlActPayload.get_install_mysql_checksum_payload.__name__),
                    (_("部署DBA工具箱"), MysqlActPayload.get_install_dba_toolkit_payload.__name__),
                ],
            )

            pipes.append(
                pipe.build_sub_process(
                    sub_name=_("{} 部署dba工具".format("\n".join([ele.immute_domain for ele in relate_clusters])))
                )
            )

        p = SubBuilder(root_id=self.root_id, data=self.data)
        p.add_parallel_sub_pipeline(sub_flow_list=pipes)
        return p.build_sub_process(sub_name=_("存储层标准化"))
5 changes: 5 additions & 0 deletions dbm-ui/backend/flow/engine/controller/mysql.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
from backend.flow.engine.bamboo.scene.mysql.mysql_ha_disable_flow import MySQLHADisableFlow
from backend.flow.engine.bamboo.scene.mysql.mysql_ha_enable_flow import MySQLHAEnableFlow
from backend.flow.engine.bamboo.scene.mysql.mysql_ha_full_backup_flow import MySQLHAFullBackupFlow
from backend.flow.engine.bamboo.scene.mysql.mysql_ha_standardize_flow import MySQLHAStandardizeFlow
from backend.flow.engine.bamboo.scene.mysql.mysql_master_fail_over import MySQLMasterFailOverFlow
from backend.flow.engine.bamboo.scene.mysql.mysql_master_slave_switch import MySQLMasterSlaveSwitchFlow
from backend.flow.engine.bamboo.scene.mysql.mysql_migrate_cluster_flow import MySQLMigrateClusterFlow
Expand Down Expand Up @@ -496,3 +497,7 @@ def mysql_single_rename_database_scene(self):
root_id=self.root_id, data=self.ticket_data, cluster_type=ClusterType.TenDBSingle.value
)
flow.rename_database()

def mysql_ha_standardize_scene(self):
    """Build and run the TenDBHA cluster standardization flow for this ticket."""
    MySQLHAStandardizeFlow(root_id=self.root_id, data=self.ticket_data).standardize()
Loading

0 comments on commit f986f9f

Please sign in to comment.