BitNetMCU_inference.h
#ifndef BITNETMCU_INFERENCE_H
#define BITNETMCU_INFERENCE_H
#include <stdint.h>
/**
* @brief Applies a ReLU activation function to an array of integers and normalizes the result to 8-bit integers.
*
* @param input Pointer to the input array of 32-bit integers.
* @param output Pointer to the output array of 8-bit integers.
* @param n_input The number of elements in the input array.
* @return The index of the maximum value in the input array, determined before the ReLU activation is applied.
*/
uint32_t ReLUNorm(int32_t *input, int8_t *output, uint32_t n_input);
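/*
 * Example usage (an illustrative sketch; the buffer names and the layer
 * size of 16 are hypothetical, not part of this header):
 *
 *   int32_t layer_out[16];    // raw 32-bit accumulators from a layer
 *   int8_t  activations[16];  // normalized 8-bit activations
 *
 *   uint32_t argmax = ReLUNorm(layer_out, activations, 16);
 *   // 'argmax' is the index of the largest pre-ReLU value; when called
 *   // on the final layer it serves as the predicted class.
 */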
/**
* @brief Processes a fully connected layer in a neural network.
*
* This function processes a fully connected layer by computing the dot product of the
* input activations and the layer weights, storing the result in the output array.
*
* @param activations Pointer to the input activations of the layer.
* @param weights Pointer to the weights of the layer.
* @param bits_per_weight The number of bits per weight.
* @param n_input The number of input neurons.
* @param n_output The number of output neurons.
* @param output Pointer to the output array where the result of the layer is stored.
*/
void processfclayer(int8_t *activations, const uint32_t *weights, int32_t bits_per_weight, uint32_t n_input, uint32_t n_output, int32_t *output);
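/*
 * Example of chaining processfclayer with ReLUNorm (an illustrative
 * sketch; the weight array 'L1_weights', the layer sizes 64/32, and the
 * 4-bit weight encoding are assumptions, not part of this header):
 *
 *   extern const uint32_t L1_weights[];  // packed quantized weights
 *
 *   int8_t  input[64];   // 8-bit input activations
 *   int32_t accum[32];   // 32-bit accumulators filled by the layer
 *   int8_t  hidden[32];  // normalized activations for the next layer
 *
 *   processfclayer(input, L1_weights, 4, 64, 32, accum);
 *   ReLUNorm(accum, hidden, 32);
 */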
#endif // BITNETMCU_INFERENCE_H