Search…
YAML
In YAML format
Apart from JSON, the configuration file can also be written in YAML. YAML expresses the same data as JSON but in a more human-readable form. Unlike JSON, YAML denotes hierarchy through indentation — two spaces per nesting level — as shown in the example below.
```yaml
---
host: "localhost"
port: 0
use_ssl: false
protocol: "HTTP"
static_content_root: "static-content"
static_content_url: "/static-content"
static_content_index_page: "/index.html"
kafka_configuration:
  start_http_server_for_kafka: true
  http_kafka_host: "localhost"
  http_kafka_port: 0
  consumer_topic_name: "inference-in"
  consumer_key_deserializer_class: "io.vertx.kafka.client.serialization.JsonObjectDeserializer"
  consumer_value_deserializer_class: "io.vertx.kafka.client.serialization.JsonObjectDeserializer"
  consumer_group_id: "konduit-serving-consumer-group"
  consumer_auto_offset_reset: "earliest"
  consumer_auto_commit: "true"
  producer_topic_name: "inference-out"
  producer_key_serializer_class: "io.vertx.kafka.client.serialization.JsonObjectSerializer"
  producer_value_serializer_class: "io.vertx.kafka.client.serialization.JsonObjectSerializer"
  producer_acks: "1"
mqtt_configuration: {}
custom_endpoints: []
pipeline:
  steps:
  - '@type': "DEEPLEARNING4J"
    modelUri: "<path_to_model>"
    inputNames:
    - "1"
    - "2"
    outputNames:
    - "11"
    - "22"
  - '@type': "LOGGING"
    logLevel: "INFO"
    log: "KEYS_AND_VALUES"
```
Example of default YAML configuration file with custom Graph Pipeline Steps:
```yaml
---
host: "localhost"
port: 0
use_ssl: false
protocol: "HTTP"
static_content_root: "static-content"
static_content_url: "/static-content"
static_content_index_page: "/index.html"
kafka_configuration:
  start_http_server_for_kafka: true
  http_kafka_host: "localhost"
  http_kafka_port: 0
  consumer_topic_name: "inference-in"
  consumer_key_deserializer_class: "io.vertx.kafka.client.serialization.JsonObjectDeserializer"
  consumer_value_deserializer_class: "io.vertx.kafka.client.serialization.JsonObjectDeserializer"
  consumer_group_id: "konduit-serving-consumer-group"
  consumer_auto_offset_reset: "earliest"
  consumer_auto_commit: "true"
  producer_topic_name: "inference-out"
  producer_key_serializer_class: "io.vertx.kafka.client.serialization.JsonObjectSerializer"
  producer_value_serializer_class: "io.vertx.kafka.client.serialization.JsonObjectSerializer"
  producer_acks: "1"
mqtt_configuration: {}
custom_endpoints: []
pipeline:
  outputStep: "4"
  steps:
    "1":
      '@type': "LOGGING"
      '@input': "input"
      logLevel: "INFO"
      log: "KEYS_AND_VALUES"
    "2":
      '@type': "TENSORFLOW"
      '@input': "1"
      input_names:
      - "1"
      - "2"
      output_names:
      - "11"
      - "22"
      model_uri: "<path_to_model>"
    "3":
      '@type': "DEEPLEARNING4J"
      '@input': "1"
      modelUri: "<path_to_model>"
      inputNames:
      - "1"
      - "2"
      outputNames:
      - "11"
      - "22"
    "4":
      '@type': "MERGE"
      '@input':
      - "2"
      - "3"
```
For more details on how to create the configuration file, please refer to the examples above.
Last modified 1yr ago
Copy link