Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

update #7

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -14,36 +14,6 @@ def __init__(self, status_code, body):
DisResponse.__init__(self, status_code, body)


#
# class disApplistResponse(DisResponse):
# def __init__(self, status_code, body, total_number):
# app_list = []
# if 'apps' in body.keys():
# for i in body.get('apps'):
# app_list.append(i.get('app_name'))
# body.setdefault('apps_list', app_list)
# body.setdefault('total_number', total_number)
#
# import copy
# new_body = copy.deepcopy(body)
# if 'apps' in new_body.keys():
# del new_body['apps']
# if 'hasMoreApp' in new_body.keys():
# del new_body['hasMoreApp']
#
# new_body.setdefault('appinfo_list', body.get('apps'))
# new_body.setdefault('has_more_app', body.get('hasMoreApp'))
#
# else:
# import copy
# new_body = copy.deepcopy(body)
# if 'hasMoreApp' in new_body.keys():
# del new_body['hasMoreApp']
# new_body.setdefault('has_more_app', body.get('hasMoreApp'))
#
# DisResponse.__init__(self, status_code, new_body)
# self.body = new_body


class disApplistResponse(DisResponse):
def __init__(self, status_code, body):
Expand Down
67 changes: 1 addition & 66 deletions dis_sdk_python_demo/add_dump_task_sample.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,77 +15,12 @@
key=obs_dump_task,value=['text','obs-1253', '','yyyy', '|'])


# mrs_dump_task=['destination_file_type','mrs_cluster_id','mrs_cluster_name','mrs_hdfs_path',
# 'hdfs_prefix_folder','obs_bucket_path','file_prefix']
# mrs_Schema = DumpTask.setSchema(basic_Schema=basic_Schema,
# key=mrs_dump_task,value=['text','7d811b1b-cb40-4ddc-8995-8a992251cd44', 'mrs_7wu8','/app-logs',
# '','obs-1253',''])


# dli_dump_task=['dli_database_name','dli_table_name','obs_bucket_path','file_prefix']
# dli_Schema = DumpTask.setSchema(basic_Schema=basic_Schema,
# key=dli_dump_task,value=['a','abcd_12345','obs-1253',''])


# dws_dump_task=['dws_cluster_name','dws_cluster_id','dws_database_name',
# 'dws_schema','dws_table_name','dws_delimiter',
# 'user_name','user_password',
# 'kms_user_key_name','kms_user_key_id',
# 'obs_bucket_path','file_prefix']
# dws_Schema = DumpTask.setSchema(basic_Schema=basic_Schema,
# key=dws_dump_task,value=['dis-to-dws','72d6a2e2-c6cf-4ca2-8e58-b6e3b0c89464', 'a',
# 'a','a','|',
# 'xxxx','abc#123456',
# 'dlf/default','7dbc8756-c274-4ad8-b9ae-0b43db2d3fe1',
# 'obs-1253', ''])


# CloudTable_HBase_dump_task=['cloudtable_cluster_name','cloudtable_cluster_id','cloudtable_table_name',
# 'obs_backup_bucket_path','backup_file_prefix','cloudtable_row_key_delimiter',
# 'row_key',
# 'columns']
# CloudTable_HBase_Schema = DumpTask.setSchema(basic_Schema=basic_Schema,
# key=CloudTable_HBase_dump_task,
# value=['cloudtable_cluster','b8c095e2-db5f-4732-8a1d-eacd662e35dc', 'cloudtable_table',
# 'obs-test-hz','','|',
# [{"value": "dataId","type": "String"}],
# [{"column_family_name": "cfname1","column_name": "ID","value": "dataId","type": "String"},
# {"column_family_name": "cfname2","column_name": "VALUE","value": "dataValue","type": "String"}]
# ])


# CloudTable_OpenTSDB_dump_task=['cloudtable_cluster_name','cloudtable_cluster_id',
# 'obs_backup_bucket_path', 'backup_file_prefix',
# 'metric',
# 'timestamp',
# 'value',
# 'tags']
# CloudTable_OpenTSDB_Schema = DumpTask.setSchema(basic_Schema=basic_Schema,
# key=CloudTable_OpenTSDB_dump_task,
# value=['cloudtable_cluster','b8c095e2-db5f-4732-8a1d-eacd662e35dc',
# 'obs-test-hz','',
# [{"type":"Constant",
# "value":"age"}],
# {"value":"date","type":"String", "format":"yyyy/MM/dd HH:mm:ss"},
# {"value":"value","type":"Bigint"},
# [{"name":"name","value":"name","type":"Bigint"}]
# ])

def add_dump_task_test():
    """Demo: create a dump (transfer) task on a DIS stream.

    By default an OBS dump task is created from ``obs_Schema``; the
    commented-out calls show how to target the other destinations with
    their matching schema objects. Prints the HTTP status code on
    success, or the exception text on failure.
    """
    client = disclient(endpoint='', ak='', sk='', projectid='', region='')
    try:
        # Create an OBS dump task using the obs_Schema configuration.
        resp = client.add_dump_task(streamname, task_name, 'OBS', obs_Schema)
        # Create an MRS dump task using mrs_Schema:
        # resp = client.add_dump_task(streamname, task_name, 'MRS', mrs_Schema)
        # Create a DLI dump task using dli_Schema:
        # resp = client.add_dump_task(streamname, task_name, 'DLI', dli_Schema)
        # Create a DWS dump task using dws_Schema:
        # resp = client.add_dump_task(streamname, task_name, 'DWS', dws_Schema)
        # Create a CloudTable HBase dump task using CloudTable_HBase_Schema:
        # resp = client.add_dump_task(streamname, task_name, 'CloudTable_HBase', CloudTable_HBase_Schema)
        # Create a CloudTable OpenTSDB dump task using CloudTable_OpenTSDB_Schema:
        # resp = client.add_dump_task(streamname, task_name, 'CloudTable_OpenTSDB', CloudTable_OpenTSDB_Schema)

        print(resp.statusCode)
    except Exception as ex:
        print(str(ex))
Expand Down
5 changes: 0 additions & 5 deletions dis_sdk_python_demo/get_cursor_sample.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,6 @@ def get_cursor_test():
try:
# startSeq与AT_SEQUENCE_NUMBER/AFTER_SEQUENCE_NUMBER搭配使用
r = cli.getCursor(stream_name, partition_id, cursorType='AT_SEQUENCE_NUMBER', startSeq="0")
# r = cli.getCursor(streamname, partitionId, cursorType='AFTER_SEQUENCE_NUMBER', startSeq="0")
# timestamp与AT_TIMESTAMP搭配使用
# r = cli.getCursor(streamname, partitionId, cursorType='AT_TIMESTAMP',timestamp=1554694135190)
# r = cli.getCursor(streamname, partitionId, cursorType='TRIM_HORIZON')
# r = cli.getCursor(streamname, partitionId, cursorType='LATEST')
print(r.statusCode)
if IS_PYTHON2:
print(json.dumps(r.body))
Expand Down
9 changes: 2 additions & 7 deletions dis_sdk_python_demo/get_records_protobuf_sample.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,7 @@ def get_records_protobuf_test():
cli = disclient(endpoint='', ak='', sk='', projectid='', region='', bodySerializeType='protobuf')
try:
r = cli.getCursor(stream_name, partitionId, cursorType='AT_SEQUENCE_NUMBER', startSeq="0")
# r = cli.getCursor(streamname, partitionId, cursorType='AFTER_SEQUENCE_NUMBER', startSeq="0")
# r = cli.getCursor(streamname, partitionId, cursorType='AT_TIMESTAMP',timestamp=1554694135190)
# r = cli.getCursor(streamname, partitionId, cursorType='TRIM_HORIZON')
# r = cli.getCursor(streamname, partitionId, cursorType='LATEST')

cursor = r.cursor
while cursor:
r = cli.getRecords(partitioncursor=cursor)
Expand All @@ -25,9 +22,7 @@ def get_records_protobuf_test():
print(json.dumps(r.body))
except:
print(r.body)
# for i in r.getRecordResult(r.recordResult):
# print("record[{}],sequenceNumber[{}],partitionKey[{}],timestamp[{}],timestamp_type[{}]".format(
# i.data, i.sequence_number, i.partitionKey,i.timestamp,i.timestamp_type))

else:break
except Exception as ex:
print(str(ex))
Expand Down
10 changes: 2 additions & 8 deletions dis_sdk_python_demo/get_records_sample.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,7 @@ def getRecords_test():
cli = disclient(endpoint='', ak='', sk='', projectid='', region='')
try:
r = cli.getCursor(stream_name, partitionId, cursorType='AT_SEQUENCE_NUMBER', startSeq="0")
# r = cli.getCursor(streamname, partitionId, cursorType='AFTER_SEQUENCE_NUMBER', startSeq="0")
# r = cli.getCursor(streamname, partitionId, cursorType='AFTER_SEQUENCE_NUMBER', startSeq="0")
# r = cli.getCursor(streamname, partitionId, cursorType='AT_TIMESTAMP',timestamp=1554694135190)
# r = cli.getCursor(streamname, partitionId, cursorType='TRIM_HORIZON')
# r = cli.getCursor(streamname, partitionId, cursorType='LATEST')

cursor = r.cursor
while cursor:
r = cli.getRecords(partitioncursor=cursor)
Expand All @@ -26,9 +22,7 @@ def getRecords_test():
print(json.dumps(r.body))
except:
print(r.body)
# for i in r.getRecordResult(r.recordResult):
# print("record[{}],sequenceNumber[{}],partitionKey[{}],timestamp[{}],timestamp_type[{}]".format(
# i.data, i.sequence_number, i.partitionKey,i.timestamp,i.timestamp_type))

else:
break

Expand Down