# Rakefile (forked from flant/loghouse)
require_relative 'application'
require 'csv' # stdlib; used by :insert_fixtures below (may already be loaded via 'application')
# Dir.glob('lib/tasks/*.rake').each { |r| load r }

# Environment variable values treated as "true".
TRUE_VALUES = %w[1 on true]

Time.zone = Loghouse::TIME_ZONE

# Creates the ClickHouse log tables and runs schema migrations.
# Does nothing unless DO_DB_DEPLOY is set to a true value; FORCE additionally
# recreates the buffer and migration tables.
task :create_logs_tables do
  force        = TRUE_VALUES.include?(ENV['FORCE'])
  do_db_deploy = TRUE_VALUES.include?(ENV['DO_DB_DEPLOY'])
  next unless do_db_deploy

  # Current schema version as recorded in the version table (0 if the table does not exist yet).
  db_version = 0
  if ::Clickhouse.connection.exists_table(LogsTables::DB_VERSION_TABLE)
    db_version = ::Clickhouse.connection.query("SELECT MAX(version) AS version FROM #{LogsTables::DB_VERSION_TABLE}")[0][0]
  end
  Log.log "Got db version #{db_version}. Expected version #{LogsTables::DB_VERSION}"

  case db_version
  when 0..2
    # Old (or absent) schema: rebuild the storage table (buffer/migration tables
    # only when FORCE is set) and record the new schema version.
    Log.log "Run migration from db version < #{LogsTables::DB_VERSION}"
    LogsTables.create_storage_table(force: true)
    LogsTables.create_buffer_table(force: force)
    LogsTables.create_migration_table(force: force)
    ::Clickhouse.connection.execute "INSERT INTO #{LogsTables::DB_VERSION_TABLE} VALUES (NOW(), #{LogsTables::DB_VERSION})"
    Log.log "Migration done"
  when 3
    # Version 3 schema: update the TTL on the storage table to match the retention period.
    Log.log "Run migration for version #{LogsTables::DB_VERSION}"
    ::Clickhouse.connection.execute "ALTER TABLE #{LogsTables::TABLE_NAME} MODIFY TTL date + INTERVAL #{LogsTables::RETENTION_PERIOD} DAY DELETE"
    Log.log "Migration done"
  else
    Log.log "Unknown version #{db_version}. Nothing to do."
  end
end
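# Example invocation (a sketch; assumes the usual `rake` binary is run from the
# directory containing this Rakefile against a reachable ClickHouse instance):
#   DO_DB_DEPLOY=1 rake create_logs_tables
#   DO_DB_DEPLOY=1 FORCE=1 rake create_logs_tables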
# Generates fake log rows from fixtures/fake_data.tsv, spreading their timestamps
# randomly over the retention period, and inserts them into ClickHouse.
task :insert_fixtures do
  s = CSV.generate do |csv|
    # Header row: timestamp columns, Kubernetes attributes and the nested field arrays.
    csv << [LogsTables::TIMESTAMP_ATTRIBUTE, LogsTables::NSEC_ATTRIBUTE, *LogsTables::KUBERNETES_ATTRIBUTES.keys,
            'labels.names', 'labels.values',
            'string_fields.names', 'string_fields.values', 'number_fields.names', 'number_fields.values',
            'boolean_fields.names', 'boolean_fields.values', 'null_fields.names']

    CSV.foreach('fixtures/fake_data.tsv', col_sep: "\t") do |r|
      # Random timestamp within the retention window plus a random sub-second part.
      time = rand(Range.new((LogsTables::RETENTION_PERIOD * 24).hours.ago.utc, Time.zone.now.utc))
      o = []
      o << time.strftime('%Y-%m-%d %H:%M:%S')
      o << rand(10**6).to_i
      o += r
      csv << o
    end
  end
  Clickhouse.connection.insert_rows(LogsTables::TABLE_NAME, csv: s)
end
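# Example invocation (a sketch; assumes fixtures/fake_data.tsv is present and the
# tables above have already been created):
#   rake insert_fixtures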