layer

// darknet.h

/* Enumeration of every layer kind the framework can build.
 * The parser maps config-file section names to one of these tags,
 * and dispatch code (forward/backward/free) switches on it. */
typedef enum {
    CONVOLUTIONAL,      // convolution layer
    DECONVOLUTIONAL,    // transposed (de-)convolution layer
    CONNECTED,          // fully connected layer
    MAXPOOL,            // max pooling layer
    SOFTMAX,            // softmax layer
    DETECTION,          // object detection layer
    DROPOUT,            // dropout layer
    CROP,               // crop layer
    ROUTE,              // route (concatenate earlier outputs) layer
    COST,               // cost/loss layer
    NORMALIZATION,      // normalization layer
    AVGPOOL,            // average pooling layer
    LOCAL,              // locally connected layer
    SHORTCUT,           // shortcut (residual add) layer
    ACTIVE,             // standalone activation layer
    RNN,                // recurrent neural network layer
    GRU,                // gated recurrent unit layer
    LSTM,               // long short-term memory layer
    CRNN,               // convolutional recurrent layer
    BATCHNORM,          // batch normalization layer
    NETWORK,            // nested network pseudo-layer
    XNOR,               // binary (XNOR-net) layer
    REGION,             // region proposal layer
    YOLO,               // YOLO detection layer
    ISEG,               // instance segmentation layer
    REORG,              // reorg (space-to-channel reshuffle) layer
    UPSAMPLE,           // upsampling layer
    LOGXENT,            // logistic cross-entropy layer
    L2NORM,             // L2 normalization layer
    BLANK               // placeholder / unrecognized layer
} LAYER_TYPE;

์ด ์ฝ”๋“œ๋Š” ์—ด๊ฑฐํ˜•(enum)์œผ๋กœ LAYER_TYPE์ด๋ผ๋Š” ํƒ€์ž…์„ ์ •์˜ํ•˜๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. LAYER_TYPE์€ ๋‹ค์–‘ํ•œ ๋ ˆ์ด์–ด ์œ ํ˜•์„ ์ •์˜ํ•˜๊ณ  ์žˆ์œผ๋ฉฐ, ๊ฐ ๋ ˆ์ด์–ด ์œ ํ˜•์€ ํ•ด๋‹นํ•˜๋Š” ์ด๋ฆ„์œผ๋กœ ์ •์˜๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค.

๋‹ค์Œ์€ ๊ฐ ๋ ˆ์ด์–ด ์œ ํ˜•๊ณผ ๊ทธ์— ํ•ด๋‹นํ•˜๋Š” ์ด๋ฆ„์ž…๋‹ˆ๋‹ค.

  • CONVOLUTIONAL: ์ปจ๋ณผ๋ฃจ์…˜(Convolution) ๋ ˆ์ด์–ด

  • DECONVOLUTIONAL: ๋””์ปจ๋ณผ๋ฃจ์…˜(Deconvolution) ๋ ˆ์ด์–ด

  • CONNECTED: ์™„์ „ ์—ฐ๊ฒฐ(Fully Connected) ๋ ˆ์ด์–ด

  • MAXPOOL: ๋งฅ์Šค ํ’€๋ง(Max Pooling) ๋ ˆ์ด์–ด

  • SOFTMAX: ์†Œํ”„ํŠธ๋งฅ์Šค(Softmax) ๋ ˆ์ด์–ด

  • DETECTION: ๊ฐ์ฒด ๊ฒ€์ถœ(Detection) ๋ ˆ์ด์–ด

  • DROPOUT: ๋“œ๋กญ์•„์›ƒ(Dropout) ๋ ˆ์ด์–ด

  • CROP: ํฌ๋กญ(Crop) ๋ ˆ์ด์–ด

  • ROUTE: ๋ฃจํŠธ(Route) ๋ ˆ์ด์–ด

  • COST: ๋น„์šฉ(Cost) ๋ ˆ์ด์–ด

  • NORMALIZATION: ์ •๊ทœํ™”(Normalization) ๋ ˆ์ด์–ด

  • AVGPOOL: ํ‰๊ท  ํ’€๋ง(Average Pooling) ๋ ˆ์ด์–ด

  • LOCAL: ๋กœ์ปฌ(Local) ๋ ˆ์ด์–ด

  • SHORTCUT: ์ˆ์ปท(Shortcut) ๋ ˆ์ด์–ด

  • ACTIVE: ํ™œ์„ฑํ™”(Activation) ๋ ˆ์ด์–ด

  • RNN: ์ˆœํ™˜ ์‹ ๊ฒฝ๋ง(Recurrent Neural Network) ๋ ˆ์ด์–ด

  • GRU: ๊ฒŒ์ดํŠธ ์ˆœํ™˜ ์œ ๋‹›(Gated Recurrent Unit) ๋ ˆ์ด์–ด

  • LSTM: ์žฅ๋‹จ๊ธฐ ๋ฉ”๋ชจ๋ฆฌ(Long Short-Term Memory) ๋ ˆ์ด์–ด

  • CRNN: ํ•ฉ์„ฑ๊ณฑ ์ˆœํ™˜ ์‹ ๊ฒฝ๋ง(Convolutional Recurrent Neural Network) ๋ ˆ์ด์–ด

  • BATCHNORM: ๋ฐฐ์น˜ ์ •๊ทœํ™”(Batch Normalization) ๋ ˆ์ด์–ด

  • NETWORK: ๋„คํŠธ์›Œํฌ(Network) ๋ ˆ์ด์–ด

  • XNOR: ์ด์ง„ํ™”(Binary) ๋ ˆ์ด์–ด

  • REGION: ์ง€์—ญ(Region) ๋ ˆ์ด์–ด

  • YOLO: YOLO(You Only Look Once) ๋ ˆ์ด์–ด

  • ISEG: ์ธ์Šคํ„ด์Šค ๋ถ„ํ• (Instance Segmentation) ๋ ˆ์ด์–ด

  • REORG: ๋ฆฌ์˜ค๊ทธ(Reorg) ๋ ˆ์ด์–ด

  • UPSAMPLE: ์—…์ƒ˜ํ”Œ(Upsample) ๋ ˆ์ด์–ด

  • LOGXENT: ๋กœ์ง€์Šคํ‹ฑ ๊ต์ฐจ ์—”ํŠธ๋กœํ”ผ(Logistic Cross-Entropy) ๋ ˆ์ด์–ด

  • L2NORM: L2 ๋…ธ๋ฆ„(L2 Norm) ๋ ˆ์ด์–ด

  • BLANK: ๋นˆ(Blank) ๋ ˆ์ด์–ด

์ด ์ฝ”๋“œ๋Š” LAYER_TYPE์ด๋ผ๋Š” ์—ด๊ฑฐํ˜•์„ ์ •์˜ํ•œ ๊ฒƒ(ํ•จ์ˆ˜๊ฐ€ ์•„๋‹Œ ํƒ€์ž… ์ •์˜)์ด๋ฏ€๋กœ ์ž…๋ ฅ๊ฐ’๊ณผ ๋™์ž‘์€ ์—†์Šต๋‹ˆ๋‹ค.

free_layer

/* Free every heap buffer owned by a layer.
 *
 * l: the layer whose CPU-side buffers are released (passed by value; only
 *    the pointers inside it are used).
 *
 * DROPOUT layers own only `rand`, so they are handled first and the
 * function returns early. For all other layer types every possibly
 * allocated pointer is passed to free(); free(NULL) is a no-op per the
 * C standard, so no null guards are needed.
 *
 * NOTE(review): GPU-side buffers (the *_gpu fields) are not released
 * here — presumably handled elsewhere; confirm against the caller. */
void free_layer(layer l)
{
    if (l.type == DROPOUT) {
        free(l.rand);
        return;
    }
    free(l.cweights);
    free(l.indexes);
    free(l.input_layers);
    free(l.input_sizes);
    free(l.map);
    free(l.rand);
    free(l.cost);
    free(l.state);
    free(l.prev_state);
    free(l.forgot_state);
    free(l.forgot_delta);
    free(l.state_delta);
    free(l.concat);
    free(l.concat_delta);
    free(l.binary_weights);
    free(l.biases);
    free(l.bias_updates);
    free(l.scales);
    free(l.scale_updates);
    free(l.weights);
    free(l.weight_updates);
    free(l.delta);
    free(l.output);
    free(l.squared);
    free(l.norms);
    free(l.spatial_mean);
    free(l.mean);
    free(l.variance);
    free(l.mean_delta);
    free(l.variance_delta);
    free(l.rolling_mean);
    free(l.rolling_variance);
    free(l.x);
    free(l.x_norm);
    free(l.m);
    free(l.v);
    free(l.z_cpu);
    free(l.r_cpu);
    free(l.h_cpu);
    free(l.binary_input);
}

ํ•จ์ˆ˜ ์ด๋ฆ„: free_layer

์ž…๋ ฅ:

  • layer ๊ตฌ์กฐ์ฒด (layer ํƒ€์ž…์˜ ๋ณ€์ˆ˜ l — ํฌ์ธํ„ฐ๊ฐ€ ์•„๋‹ˆ๋ผ ๊ฐ’์œผ๋กœ ์ „๋‹ฌ๋จ)

๋™์ž‘:

  • layer ๊ตฌ์กฐ์ฒด์—์„œ ๋™์ ์œผ๋กœ ํ• ๋‹นํ•œ ๋ชจ๋“  ๋ฉ”๋ชจ๋ฆฌ๋ฅผ ํ•ด์ œํ•˜๋Š” ํ•จ์ˆ˜.

  • DROPOUT ๋ ˆ์ด์–ด์ธ ๊ฒฝ์šฐ l.rand ๋ณ€์ˆ˜๋งŒ ํ•ด์ œํ•˜๊ณ  ํ•จ์ˆ˜๋ฅผ ์ข…๋ฃŒํ•œ๋‹ค.

์„ค๋ช…:

  • ์ด ํ•จ์ˆ˜๋Š” ์ž…๋ ฅ์œผ๋กœ ์ „๋‹ฌ๋œ layer ๊ตฌ์กฐ์ฒด์—์„œ ๋™์ ์œผ๋กœ ํ• ๋‹น๋œ ๋ชจ๋“  ๋ฉ”๋ชจ๋ฆฌ๋ฅผ ํ•ด์ œํ•œ๋‹ค.

  • ํ• ๋‹น๋œ ๋ฉ”๋ชจ๋ฆฌ๊ฐ€ ์—†๋Š” ๊ฒฝ์šฐ ์•„๋ฌด๋Ÿฐ ๋™์ž‘๋„ ํ•˜์ง€ ์•Š๋Š”๋‹ค. DROPOUT ๋ ˆ์ด์–ด์ธ ๊ฒฝ์šฐ l.rand ๋ณ€์ˆ˜๋งŒ ํ•ด์ œํ•˜๊ณ  ํ•จ์ˆ˜๋ฅผ ์ข…๋ฃŒํ•œ๋‹ค.

  • ๋‚˜๋จธ์ง€ ๋ ˆ์ด์–ด์˜ ๊ฒฝ์šฐ, layer ๊ตฌ์กฐ์ฒด์—์„œ ์‚ฌ์šฉํ•˜๋Š” ๋ชจ๋“  ๋ณ€์ˆ˜๋ฅผ ์ˆœํšŒํ•˜๋ฉฐ ํ• ๋‹น๋œ ๋ฉ”๋ชจ๋ฆฌ๊ฐ€ ์žˆ๋Š” ๊ฒฝ์šฐ ๋ฉ”๋ชจ๋ฆฌ๋ฅผ ํ•ด์ œํ•œ๋‹ค.

  • ๊ฐ ๋ณ€์ˆ˜์— ๋Œ€ํ•œ ๋ฉ”๋ชจ๋ฆฌ ํ•ด์ œ๋Š” malloc ํ•จ์ˆ˜๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ํ• ๋‹น๋œ ๊ฒƒ๊ณผ ๋™์ผํ•œ ๋ฐฉ์‹์œผ๋กœ ์ด๋ฃจ์–ด์ง„๋‹ค.

Last updated

Was this helpful?