Commit

fix 123
ymakedaq committed Sep 24, 2024
1 parent 8c27fd2 commit 31cd5ab
Showing 1 changed file with 30 additions and 15 deletions.
@@ -18,7 +18,7 @@
from backend.configuration.constants import DBType
from backend.constants import IP_PORT_DIVIDER
from backend.db_meta.enums import InstanceInnerRole, InstanceStatus
from backend.db_meta.models import Cluster, StorageInstance
from backend.db_meta.models import Cluster
from backend.db_package.models import Package
from backend.flow.consts import MediumEnum
from backend.flow.engine.bamboo.scene.common.builder import Builder, SubBuilder
@@ -36,6 +36,7 @@
from backend.flow.plugins.components.collections.mysql.mysql_db_meta import MySQLDBMetaComponent
from backend.flow.plugins.components.collections.mysql.trans_flies import TransFileComponent
from backend.flow.utils.common_act_dataclass import DownloadBackupClientKwargs
from backend.flow.utils.mysql.common.mysql_cluster_info import get_ports, get_version_and_charset
from backend.flow.utils.mysql.mysql_act_dataclass import (
ClearMachineKwargs,
DBMetaOPKwargs,
@@ -72,13 +73,13 @@ def upgrade(self):
"uid": "2022051612120001",
"created_by": "xxxx",
"bk_biz_id": "152",
"module": 1,
"ticket_type": "MYSQL_RESTORE_SLAVE",
"backup_source": "local",
"infos": [
{
"cluster_id": "1001",
"cluster_ids": [1001,1002],
"pkg_id": 123,
"new_db_module_id: "578",
"old_slave": {
"bk_biz_id": 200005000,
"bk_cloud_id": 0,
@@ -98,24 +99,19 @@
"""
cluster_ids = []
for info in self.ticket_data["infos"]:
cluster_ids.append(info["cluster_id"])
cluster_ids.extend(info["cluster_ids"])

p = Builder(
root_id=self.root_id,
data=copy.deepcopy(self.ticket_data),
need_random_pass_cluster_ids=list(set(cluster_ids)),
)
subflows = []
created_by = self.ticket_data["ticket_type"]
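# Build one upgrade subflow per info entry in the ticket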
for info in self.ticket_data["infos"]:
cluster = Cluster.objects.get(id=info["cluster_id"])
old_slave_ip = info["old_slave"]["ip"]
bk_cloud_id = info["old_slave"]["bk_cloud_id"]
slave_storage = StorageInstance.objects.filter(machine__ip=old_slave_ip, machine__bk_cloud_id=bk_cloud_id)
ports = []
for ins in slave_storage:
ports.append(ins.port)

relation_cluster_ids = [info["cluster_id"]]
ports = get_ports(info["cluster_ids"])
relation_cluster_ids = [info["cluster_ids"]]

subflow = non_standby_slaves_upgrade_subflow(
uid=str(self.ticket_data["uid"]),
@@ -128,6 +124,7 @@ def upgrade(self):
relation_cluster_ids=relation_cluster_ids,
pkg_id=info["pkg_id"],
backup_source=self.ticket_data["backup_source"],
created_by=created_by,
)
subflows.append(subflow)

@@ -146,17 +143,35 @@ def non_standby_slaves_upgrade_subflow(
add_slave_only: bool,
relation_cluster_ids: list,
pkg_id: int,
new_db_module_id: int,
backup_source: str,
created_by: str,
):
"""
Subflow for upgrading the non-standby slaves of a single-master, multi-slave cluster
"""
parent_global_data = {"uid": uid, "root_id": root_id}
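# Resolve the MySQL package to install from the ticket's pkg_id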
pkg = Package.objects.get(id=pkg_id, pkg_type=MediumEnum.MySQL, db_type=DBType.MySQL)
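# Fetch the charset configured for the target db module (the second return value is not needed here)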
charset, _ = get_version_and_charset(
cluster.bk_biz_id, db_module_id=new_db_module_id, cluster_type=cluster.cluster_type
)
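# Shared context passed to every act in this subflow via the SubBuilder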
parent_global_data = {
"uid": uid,
"root_id": root_id,
"bk_biz_id": cluster.bk_biz_id,
"bk_cloud_id": cluster.bk_cloud_id,
"db_module_id": new_db_module_id,
"time_zone": cluster.time_zone,
"cluster_type": cluster.cluster_type,
"created_by": created_by,
"package": pkg.name,
"ports": ports,
"charset": charset,
}

sub_pipeline = SubBuilder(root_id=root_id, data=parent_global_data)
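# Target host for the new slave and the ip of the old slave it replaces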
new_slave_ip = new_slave["ip"]
bk_host_ids = [new_slave["bk_host_id"]]
old_slave_ip = old_slave["ip"]
pkg = Package.objects.get(id=pkg_id, pkg_type=MediumEnum.MySQL, db_type=DBType.MySQL)
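# Read the running instance configs from the old slave; presumably these are reused when provisioning the new slave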
db_config = get_instance_config(cluster.bk_cloud_id, old_slave_ip, ports=ports)

# Install MySQL
@@ -168,7 +183,7 @@ def non_standby_slaves_upgrade_subflow(
if backup_source == MySQLBackupSource.LOCAL:
# Use the local backup for the migration
sync_data_sub_pipeline_list = build_data_repl_from_local_backup(
root_id, parent_global_data, relation_cluster_ids, cluster, new_slave_ip
root_id, parent_global_data, relation_cluster_ids, cluster, new_slave_ip, charset
)
sub_pipeline.add_sub_pipeline(sub_flows=sync_data_sub_pipeline_list)
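# Use a remote backup for the migration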
elif backup_source == MySQLBackupSource.REMOTE:
