Commit 5e12225

Cleanup example configs

1 parent 751f643 · commit 5e12225

5 files changed, +71 -66 lines changed

config-files/garnet_1layer_config.yml

+5 -5

@@ -1,11 +1,12 @@
-KerasJson: garnet_1layer.json
-KerasH5: garnet_1layer_weights.h5
+KerasJson: keras/garnet_1layer.json
+KerasH5: keras/garnet_1layer_weights.h5
+Backend: Vivado
 OutputDir: garnet_1layer
 ProjectName: myproject
-XilinxPart: xcku115-flvb2104-2-i
+Part: xcvu9p-flga2577-2-e
 ClockPeriod: 5
 
-IOType: io_parallel # options: io_serial/io_parallel
+IOType: io_parallel
 
 HLSConfig:
   Model:
@@ -33,4 +34,3 @@ HLSConfig:
     regression_linear:
       Precision:
         result: ap_fixed<16, 6, AP_RND, AP_SAT>
-  Optimizers: ['eliminate_linear_activation']
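
The garnet_3layer config below gets the identical treatment. For orientation, the renamed top-level keys correspond one-to-one to arguments of hls4ml's Python converter. A minimal sketch, assuming hls4ml's convert_from_keras_model front end and a stand-in Keras model (the real GarNet network needs its custom layer classes to deserialise from KerasJson/KerasH5, and the exact keyword names may vary between hls4ml versions):

import hls4ml
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

# Stand-in model; the config actually points at keras/garnet_1layer.json
# and keras/garnet_1layer_weights.h5, which require the custom GarNet layer.
model = Sequential([Dense(8, activation='relu', input_shape=(16,)), Dense(1)])

# Each top-level YAML key maps onto a converter argument.
hls_model = hls4ml.converters.convert_from_keras_model(
    model,
    backend='Vivado',               # Backend
    output_dir='garnet_1layer',     # OutputDir
    project_name='myproject',       # ProjectName
    part='xcvu9p-flga2577-2-e',     # Part (formerly XilinxPart)
    clock_period=5,                 # ClockPeriod
    io_type='io_parallel',          # IOType
)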

config-files/garnet_3layer_config.yml

+5 -5

@@ -1,11 +1,12 @@
-KerasJson: garnet_3layer.json
-KerasH5: garnet_3layer_weights.h5
+KerasJson: keras/garnet_3layer.json
+KerasH5: keras/garnet_3layer_weights.h5
+Backend: Vivado
 OutputDir: garnet_3layer
 ProjectName: myproject
-XilinxPart: xcku115-flvb2104-2-i
+Part: xcvu9p-flga2577-2-e
 ClockPeriod: 5
 
-IOType: io_parallel # options: io_serial/io_parallel
+IOType: io_parallel
 
 HLSConfig:
   Model:
@@ -34,4 +35,3 @@ HLSConfig:
     regression_linear:
      Precision:
        result: ap_fixed<16, 6, AP_RND, AP_SAT>
-  Optimizers: ['eliminate_linear_activation']

config-files/qkeras_3layer_config.yml

+12 -10

@@ -1,6 +1,18 @@
+KerasJson: keras/qkeras_3layer.json
+KerasH5: keras/qkeras_3layer_weights.h5
 Backend: Vivado
+OutputDir: qkeras_3layer
+ProjectName: myproject
+Part: xcvu9p-flga2577-2-e
 ClockPeriod: 5
+
+IOType: io_parallel
+
 HLSConfig:
+  Model:
+    Precision: ap_fixed<16,6>
+    ReuseFactor: 1
+    Strategy: Latency
   LayerName:
     fc1:
       Precision:
@@ -52,13 +64,3 @@ HLSConfig:
       exp_table_t: ap_fixed<18,8>
       inv_table_t: ap_fixed<18,4>
       table_size: 1024
-  Model:
-    Precision: ap_fixed<16,6>
-    ReuseFactor: 1
-    Strategy: Latency
-IOType: io_parallel
-KerasH5: keras/qkeras_3layer_weights.h5
-KerasJson: keras/qkeras_3layer.json
-OutputDir: my-hls-test
-ProjectName: myproject
-XilinxPart: xcku115-flvb2104-2-i
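
The same reshuffle is applied to qkeras_mnist_cnn_config.yml below (with io_stream and ReuseFactor 16). In hls4ml's Python interface the HLSConfig block corresponds to the hls_config dictionary passed to the converter; a sketch of the equivalent structure, assuming that dict layout and showing only the keys visible in this hunk:

# Python-side equivalent of the reordered HLSConfig block:
# model-wide defaults under 'Model', per-layer overrides under 'LayerName'.
hls_config = {
    'Model': {
        'Precision': 'ap_fixed<16,6>',
        'ReuseFactor': 1,
        'Strategy': 'Latency',
    },
    'LayerName': {
        # e.g. 'fc1': {'Precision': {...}}, as listed later in the file
    },
}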

config-files/qkeras_mnist_cnn_config.yml

+12 -10

@@ -1,6 +1,18 @@
+KerasJson: keras/qkeras_mnist_cnn.json
+KerasH5: keras/qkeras_mnist_cnn_weights.h5
 Backend: Vivado
+OutputDir: qkeras_mnist_cnn
+ProjectName: myproject
+Part: xcvu9p-flga2577-2-e
 ClockPeriod: 5
+
+IOType: io_stream
+
 HLSConfig:
+  Model:
+    Precision: ap_fixed<16,6>
+    ReuseFactor: 16
+    Strategy: Latency
   LayerName:
     input_1:
       Precision:
@@ -47,13 +59,3 @@ HLSConfig:
       exp_table_t: ap_fixed<18,12,AP_RND,AP_SAT>
       inv_table_t: ap_fixed<18,4,AP_RND,AP_SAT>
       table_size: 1024
-  Model:
-    Precision: ap_fixed<16,6>
-    ReuseFactor: 16
-    Strategy: Latency
-IOType: io_stream
-KerasH5: keras/qkeras_mnist_cnn_weights.h5
-KerasJson: keras/qkeras_mnist_cnn.json
-OutputDir: my-hls-test
-ProjectName: myproject
-XilinxPart: xcku115-flvb2104-2-i
config-files/qkeras_mnist_dense_config.yml

+37 -36

@@ -1,45 +1,46 @@
-KerasJson: qkeras_mnist_dense.json
-KerasH5: qkeras_mnist_dense_weights.h5
+KerasJson: keras/qkeras_mnist_dense.json
+KerasH5: keras/qkeras_mnist_dense_weights.h5
+Backend: Vivado
 OutputDir: qkeras_mnist_dense
 ProjectName: myproject
-XilinxPart: xcku115-flvb2104-2-i
+Part: xcvu9p-flga2577-2-e
 ClockPeriod: 5
 
-IOType: io_parallel # options: io_serial/io_parallel
+IOType: io_parallel
 
 HLSConfig:
-   Model:
+  Model:
+    Precision: ap_fixed<16,6>
+    ReuseFactor: 1
+    Strategy: Resource
+  LayerName:
+    activation_3:
      Precision: ap_fixed<16,6>
      ReuseFactor: 1
-      Strategy: Resource
-   LayerName:
-      activation_3:
-         Precision: ap_fixed<16,6>
-         ReuseFactor: 1
-         Strategy: Stable
-         exp_table_t: ap_fixed<18,8,AP_RND,AP_SAT>
-         inv_table_t: ap_fixed<18,8,AP_RND,AP_SAT>
-         table_size: 1024
-      input_3:
-         Precision:
-            result: ap_fixed<16,6>
-      q_activation_2:
-         Precision:
-            result: ap_fixed<3,1>
-         ReuseFactor: 1
-      q_activation_3:
-         Precision:
-            result: ap_fixed<4,2>
-         ReuseFactor: 1
-      q_dense_2:
-         Precision:
-            bias: ap_fixed<3,1>
-            weight: ap_int<5>
-         ReuseFactor: 112
-      q_dense_3:
-         Precision:
-            bias: ap_fixed<5,1>
-            weight: ap_fixed<5,1>
-         ReuseFactor: 1
-         Strategy: Latency
+      Strategy: Stable
+      exp_table_t: ap_fixed<18,8,AP_RND,AP_SAT>
+      inv_table_t: ap_fixed<18,8,AP_RND,AP_SAT>
+      table_size: 1024
+    input_3:
+      Precision:
+        result: ap_fixed<16,6>
+    q_activation_2:
+      Precision:
+        result: ap_fixed<3,1>
+      ReuseFactor: 1
+    q_activation_3:
+      Precision:
+        result: ap_fixed<4,2>
+      ReuseFactor: 1
+    q_dense_2:
+      Precision:
+        bias: ap_fixed<3,1>
+        weight: ap_int<5>
+      ReuseFactor: 112
+    q_dense_3:
+      Precision:
+        bias: ap_fixed<5,1>
+        weight: ap_fixed<5,1>
+      ReuseFactor: 1
+      Strategy: Latency
 
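
Once any of these configs has been converted, the generated project under OutputDir is first compiled for bit-accurate emulation and then run through Vivado HLS synthesis. A minimal end-to-end sketch with a stand-in model in place of the QKeras MNIST dense network, assuming hls4ml's convert/compile/predict/build methods (flag names on build() can differ between hls4ml versions):

import numpy as np
import hls4ml
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

# Stand-in for keras/qkeras_mnist_dense.json, which needs QKeras custom
# objects to deserialise.
model = Sequential([Dense(10, activation='softmax', input_shape=(784,))])

hls_model = hls4ml.converters.convert_from_keras_model(
    model,
    hls_config={'Model': {'Precision': 'ap_fixed<16,6>',
                          'ReuseFactor': 1,
                          'Strategy': 'Resource'}},
    output_dir='qkeras_mnist_dense',
    part='xcvu9p-flga2577-2-e',
    io_type='io_parallel',
)

hls_model.compile()                             # C++ emulation library, no Vivado needed
y = hls_model.predict(np.random.rand(1, 784))   # bit-accurate emulation of the fixed-point model
hls_model.build(csim=False, synth=True)         # Vivado HLS C synthesis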