-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: server.jl
executable file
·135 lines (111 loc) · 3.56 KB
/
server.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
#!/usr/bin/env -S julia --project
#
# $ curl -X POST -H "Content-Type: application/json" \
# -d @./path/input.json http://host:port/predict
# $ curl -X POST -H "Content-Type: application/json" \
# -d '{ "Vgs" : [ ... ]
# , "Vds" : [ ...]
# , "Vbs" : [ 0 ... 0 ]
# , "W" : [ ...]
# , "L" : [ ... ] }' \
# http://host:port/predict
using ArgParse
# Parse the command-line arguments of the prediction server.
#
# Returns the Dict produced by `parse_args` with keys
# "host", "port", "quiet" and "model".
function parseArgs()
    usg = """
        You can POST a JSON file:
        `curl -X POST -H 'Content-Type: application/json' -d @./path/input.json http://host:port/predict`
        or put the data plain in the request:
        `curl -X POST -H "Content-Type: application/json" \\
             -d '{ "Vgs" : [ ... ]
                 , "Vds" : [ ...]
                 , "Vbs" : [ 0 ... 0 ]
                 , "W" : [ ...]
                 , "L" : [ ... ] }' \\
             http://host:port/predict`"""
    settings = ArgParseSettings( usage = usg
                               , version = "0.0.1"
                               , add_version = true )
    @add_arg_table settings begin
        "--host", "-i"
            help = "IP host address on which the server listens."
            arg_type = String
            # Default to the address(es) reported by `hostname -I`.
            # NOTE(review): shells out when the table is built and requires a
            # Unix `hostname` that supports `-I` — confirm for deployment OS.
            default = read(`hostname -I`, String) |> rstrip |> String;
        "--port", "-p"
            # Fixed: original help text was a copy-paste of the --host help.
            help = "Port on which the server listens."
            arg_type = Int
            default = 8080
        "--quiet", "-q"
            help = "Be Quiet! Suppress verbose output."
            action = :store_true
        "model"
            help = "Model directory. Should only contain 1 model (.bson), otherwise behaviour is undefined."
            arg_type = String
            required = true
    end
    return parse_args(settings)
end
# Parse CLI options once at startup and bind them to the globals the
# rest of the script reads.
args = parseArgs()
host = args["host"]    # listen address (string)
port = args["port"]    # listen port (Int, default 8080)
quiet = args["quiet"]  # suppress startup messages when true
path = args["model"]   # model directory (required positional argument)
using HTTP
using JSON
using BSON
using PyCall
using ScikitLearn
using Flux
using Printf: @printf
# Python's joblib (via PyCall), used below to deserialize the
# scikit-learn input/output transformers saved alongside the model.
joblib = pyimport("joblib");
# Bundles a trained surrogate model together with its metadata.
# NOTE(review): fields are untyped (Any); the concrete types come from
# Flux / PyCall and are only known at load time.
struct Model
    net      # trained network loaded from the .bson file (called like a function in `predict`)
    paramsX  # input feature names, in the column order the net was trained on
    paramsY  # output parameter names, one per output row
    trafoX   # transformer applied to inputs (has a `.transform` method; loaded via joblib)
    trafoY   # transformer for outputs (its `.inverse_transform` maps predictions back)
end
# HTTP handler: predict output parameters for a JSON request body.
#
# The request body must be a JSON object mapping feature names to arrays
# (see the curl examples in the usage string). Two derived features are
# computed on the fly when absent from the request:
#   "QVgs" = Vgs .* 2.0      "EVds" = exp.(Vds)
#
# Returns an HTTP 200 response whose JSON body maps each name in
# `model.paramsY` to its predicted Float64 values.
# Throws ArgumentError when a required input feature is missing.
function predict(model, req::HTTP.Request)
    inp = JSON.parse(IOBuffer(HTTP.payload(req)))
    # Build the input matrix with one column per feature, in the exact
    # order the model was trained on (model.paramsX).
    X = hcat([ if haskey(inp, x)
                   inp[x]
               elseif x == "QVgs"
                   inp["Vgs"] .* 2.0
               elseif x == "EVds"
                   exp.(inp["Vds"])
               else
                   # Fixed: without this branch an unknown feature name
                   # yielded `nothing`, making `hcat` fail cryptically.
                   throw(ArgumentError("Missing input parameter: $(x)"))
               end
               for x in model.paramsX ]...)'
    # Scale → net → inverse-scale. The adjoints swap between the
    # samples-as-rows layout of the transformers and the
    # samples-as-columns layout the network expects.
    Y = ((length(size(X)) < 2) ? [X'] : X') |>
        model.trafoX.transform |>
        adjoint |> model.net |> adjoint |>
        model.trafoY.inverse_transform |>
        adjoint
    out = Dict( ( model.paramsY[i] => Float64.(Y[i,:])
                  for i = 1:length(model.paramsY) ) )
    return HTTP.Response(200, JSON.json(out))
end
# Load a trained model and its input/output transformers from `path`.
#
# Expects the directory to contain exactly one `*.bson` file (network and
# parameter names under :model/:paramsX/:paramsY) plus files ending in
# `input` and `output` (joblib-serialized transformers).
#
# Returns a `Model`. Throws ArgumentError when a required file is absent.
function loadModel(path::String)
    files = readdir(path)
    # First file with the given suffix, or a clear error. `joinpath`
    # works with or without a trailing slash on `path`, unlike the
    # previous `path * file` string concatenation.
    findFile = (suffix) -> begin
        idx = findfirst((f) -> endswith(f, suffix), files)
        isnothing(idx) && throw(ArgumentError("No *$(suffix) file found in $(path)"))
        return joinpath(path, files[idx])
    end
    model = BSON.load(findFile("bson"))
    net = model[:model]
    paramsX = model[:paramsX]
    paramsY = model[:paramsY]
    trafoX = joblib.load(findFile("input"))
    trafoY = joblib.load(findFile("output"))
    return Model(net, paramsX, paramsY, trafoX, trafoY)
end
# --- Script entry: load the model, wire the route, and serve forever. ---
if !quiet
    @printf("Loading Model from %s\n", path)
end
model = loadModel(path)
router = HTTP.Router()
# Route POST requests to the prediction handler.
# NOTE(review): registered as "predict" without a leading slash while the
# curl examples use /predict — confirm the installed HTTP.jl version
# normalizes this in HTTP.@register.
HTTP.@register( router, "POST", "predict"
              , (req) -> predict(model, req) )
if !quiet
    @printf("Starting Predict Server ...\nListening on %s:%s", host, port)
end
# Blocks here serving requests until the process is killed.
HTTP.serve(router, host, port)