forked from mikepm35/TfLambdaDemo
-
Notifications
You must be signed in to change notification settings - Fork 0
/
train.py
95 lines (73 loc) · 2.48 KB
/
train.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
try:
import unzip_requirements
except ImportError:
pass
import os
import json
import time
import functools
import tarfile
import boto3
import tensorflow as tf
import census_data
FILE_DIR = '/tmp/'
BUCKET = os.environ['BUCKET']
# FILE_DIR = './'
# BUCKET = 'tflambdademo'
def trainHandler(event, context):
    """AWS Lambda handler: train and evaluate a linear classifier on census data.

    Expects ``event['body']`` to be a JSON string containing
    ``{'epoch': <s3 key prefix>}``. Downloads the training/eval CSVs from
    that prefix in the ``BUCKET`` S3 bucket, trains a
    ``tf.estimator.LinearClassifier``, evaluates it, uploads the tarred
    model directory back to the same prefix, and returns the metrics.

    Args:
        event: API Gateway proxy event; only ``event['body']`` is read.
        context: Lambda context object (unused).

    Returns:
        dict: API Gateway proxy response with ``statusCode`` 200 and a JSON
        ``body`` holding the epoch prefix, wall-clock runtime in seconds,
        and the evaluation result.
    """
    time_start = time.time()

    body = json.loads(event.get('body'))
    epoch_files = body['epoch']

    # Build one S3 bucket handle and reuse it for all transfers instead of
    # constructing a new Session/resource chain per call.
    bucket = boto3.Session().resource('s3').Bucket(BUCKET)

    # Download training and evaluation data from S3 into Lambda's /tmp.
    for data_file in (census_data.TRAINING_FILE, census_data.EVAL_FILE):
        bucket.download_file(
            os.path.join(epoch_files, data_file),
            FILE_DIR + data_file)

    # Create feature columns. NOTE(review): deep_cols is unused — the linear
    # model only consumes wide_cols; kept for parity with the column builder.
    wide_cols, deep_cols = census_data.build_model_columns()

    # Set up the estimator; checkpoints go under a per-epoch model dir.
    classifier = tf.estimator.LinearClassifier(
        feature_columns=wide_cols,
        model_dir=FILE_DIR + 'model_' + epoch_files + '/')

    # Train: two passes over the shuffled training data.
    train_inpf = functools.partial(
        census_data.input_fn,
        FILE_DIR + census_data.TRAINING_FILE,
        num_epochs=2, shuffle=True,
        batch_size=64)
    classifier.train(train_inpf)

    # Evaluate: a single unshuffled pass over the eval data.
    test_inpf = functools.partial(
        census_data.input_fn,
        FILE_DIR + census_data.EVAL_FILE,
        num_epochs=1, shuffle=False,
        batch_size=64)
    result = classifier.evaluate(test_inpf)
    print('Evaluation result: %s' % result)

    # Tar up the model directory and store the archive back in S3.
    with tarfile.open(FILE_DIR + 'model.tar.gz', mode='w:gz') as arch:
        arch.add(FILE_DIR + 'model_' + epoch_files + '/', recursive=True)
    bucket.Object(os.path.join(epoch_files, 'model.tar.gz')
                  ).upload_file(FILE_DIR + 'model.tar.gz')

    # Metrics come back as numpy scalars (e.g. float32); .item() converts
    # them to native Python types so json.dumps can serialize them.
    for key in result:
        result[key] = result[key].item()

    response = {
        "statusCode": 200,
        "body": json.dumps({'epoch': epoch_files,
                            'runtime': round(time.time()-time_start, 1),
                            'result': result})
    }

    return response