diff --git a/dbm-services/redis/db-tools/dbactuator/pkg/atomjobs/atomredis/redis_backup.go b/dbm-services/redis/db-tools/dbactuator/pkg/atomjobs/atomredis/redis_backup.go
index 4f5394acb9..45df7a4131 100644
--- a/dbm-services/redis/db-tools/dbactuator/pkg/atomjobs/atomredis/redis_backup.go
+++ b/dbm-services/redis/db-tools/dbactuator/pkg/atomjobs/atomredis/redis_backup.go
@@ -227,7 +227,7 @@ func (job *RedisBackup) GetBackupClient() (err error) {
 		bkTag = consts.RedisForeverBackupTAG
 	}
 	// job.backupClient = backupsys.NewIBSBackupClient(consts.IBSBackupClient, bkTag)
-	job.backupClient, err = backupsys.NewCosBackupClient(consts.COSBackupClient, "", bkTag)
+	job.backupClient, err = backupsys.NewCosBackupClient(consts.COSBackupClient, consts.COSInfoFile, bkTag)
 	return
 }
 
diff --git a/dbm-services/redis/db-tools/dbactuator/pkg/consts/consts.go b/dbm-services/redis/db-tools/dbactuator/pkg/consts/consts.go
index 02b11b418f..5582cdbd69 100644
--- a/dbm-services/redis/db-tools/dbactuator/pkg/consts/consts.go
+++ b/dbm-services/redis/db-tools/dbactuator/pkg/consts/consts.go
@@ -157,6 +157,7 @@ const (
 	ForeverBackupType  = "forever_backup"
 	IBSBackupClient    = "/usr/local/bin/backup_client"
 	COSBackupClient    = "/usr/local/backup_client/bin/backup_client"
+	COSInfoFile        = "/home/mysql/.cosinfo.toml"
 	BackupTarSplitSize = "8G"
 	RedisFullBackupTAG = "REDIS_FULL"
 	RedisBinlogTAG     = "REDIS_BINLOG"
diff --git a/dbm-ui/backend/flow/plugins/components/collections/redis/redis_dts.py b/dbm-ui/backend/flow/plugins/components/collections/redis/redis_dts.py
index 4a3579282b..5a13d64f27 100644
--- a/dbm-ui/backend/flow/plugins/components/collections/redis/redis_dts.py
+++ b/dbm-ui/backend/flow/plugins/components/collections/redis/redis_dts.py
@@ -10,7 +10,6 @@
 """
 import base64
 import datetime
-import hashlib
 import logging
 import re
 import traceback
@@ -855,7 +854,7 @@ def __new_data_check_repair_job(self, global_data: dict, dts_job: TbTendisDTSJob
             ],
         }
         self.log_info(f"new_data_check_repair_job ticket_data:{ticket_data}")
-        root_id = uuid.uuid1().hex
+        root_id = f"{datetime.date.today()}{uuid.uuid1().hex[:6]}".replace("-", "")
         flow = RedisClusterDataCheckRepairFlow(root_id=root_id, data=ticket_data)
         flow.redis_cluster_data_check_repair_flow()
         self.log_info(f"new_data_check_repair_job flow_id:{root_id}")
@@ -919,7 +918,7 @@ def _execute(self, data, parent_data):
             "resource_spec": kwargs["cluster"]["dst_install_param"]["resource_spec"],
         }
         self.log_info("NewDstClusterInstallJobAndWatchStatus ticket_data==>:{}".format(ticket_data))
-        root_id = uuid.uuid1().hex
+        root_id = f"{datetime.date.today()}{uuid.uuid1().hex[:6]}".replace("-", "")
         if ticket_data["cluster_type"] == ClusterType.TendisPredixyTendisplusCluster.value:
             flow = TendisPlusApplyFlow(root_id=root_id, data=ticket_data)
             flow.deploy_tendisplus_cluster_flow()
@@ -1021,7 +1020,7 @@ def _execute(self, data, parent_data):
             ],
         }
         self.log_info("NewDstClusterFlushJobAndWatchStatus ticket_data==>:{}".format(ticket_data))
-        root_id = uuid.uuid1().hex
+        root_id = f"{datetime.date.today()}{uuid.uuid1().hex[:6]}".replace("-", "")
         flow = RedisFlushDataFlow(root_id=root_id, data=ticket_data)
         flow.redis_flush_data_flow()
 
@@ -1115,7 +1114,7 @@ def _execute(self, data, parent_data):
         self.log_info(f"new_dts_online_switch_job ticket_data:{ticket_data}")
         from backend.flow.engine.bamboo.scene.redis.redis_cluster_data_copy import RedisClusterDataCopyFlow
 
-        root_id = uuid.uuid1().hex
+        root_id = f"{datetime.date.today()}{uuid.uuid1().hex[:6]}".replace("-", "")
         flow = RedisClusterDataCopyFlow(root_id=root_id, data=ticket_data)
         flow.online_switch_flow()
 
@@ -1317,7 +1316,7 @@ def _execute(self, data, parent_data):
             "force": False,
         }
         self.log_info(f"redis_cluster_close dst_cluster:{job_row.dst_cluster} ticket_data:{ticket_data}")
-        root_id = uuid.uuid1().hex
+        root_id = f"{datetime.date.today()}{uuid.uuid1().hex[:6]}".replace("-", "")
         flow = RedisClusterOpenCloseFlow(root_id=root_id, data=ticket_data)
         flow.redis_cluster_open_close_flow()
 
@@ -1410,7 +1409,7 @@ def _execute(self, data, parent_data):
             "cluster_id": job_row.dst_cluster_id,
         }
         self.log_info(f"redis_cluster_shutdown ticket_data:{ticket_data}")
-        root_id = uuid.uuid1().hex
+        root_id = f"{datetime.date.today()}{uuid.uuid1().hex[:6]}".replace("-", "")
         flow = RedisClusterShutdownFlow(root_id=root_id, data=ticket_data)
         flow.redis_cluster_shutdown_flow()
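
For reference, the root_id scheme that this diff switches redis_dts.py to can be sketched as a small standalone snippet. The expression is copied verbatim from the diff; the new_root_id helper name and the example output are illustrative assumptions, not part of the repository.

    import datetime
    import uuid


    def new_root_id() -> str:
        """Sketch of the root_id format introduced above (helper name is hypothetical).

        Old format: the full 32-char uuid1 hex, e.g. "7f3c9a2e4b1d11ee9a2b0242ac120002".
        New format: today's date with dashes stripped, plus the first 6 hex chars of a
        uuid1, e.g. "20240115" + "7f3c9a" -> "202401157f3c9a".
        """
        # Verbatim expression from redis_dts.py: str(datetime.date.today()) renders as
        # "YYYY-MM-DD", so .replace("-", "") leaves an 8-digit date prefix.
        return f"{datetime.date.today()}{uuid.uuid1().hex[:6]}".replace("-", "")


    if __name__ == "__main__":
        print(new_root_id())  # e.g. "202401157f3c9a" (8 date digits + 6 uuid hex chars)

Presumably the date prefix makes the generated flow root ids easy to sort and grep by day, while the short uuid fragment keeps ids from colliding across jobs started on the same day.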