📄 main.c
/*
 * $Id: main.c 1272 2007-05-09 16:26:20Z mhe $
 */

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/time.h>
#include <string.h>
#include <time.h>

#include "lstm.h"
#include "roc.h"
#include "main.h"

long random();
void srandom();

void usage(char *progname)
{
    fprintf(stderr, "\nusage: %s -c configfile [-w weightfile [-test]]\n\n", progname);
}

int main(int argc, char *argv[])
{
    int i, j, k;
    int load = 0;
    int test = 0;
    char *configfile = NULL;
    char *weightfile2load = NULL;

    if (argc < 3 || argc > 6) {
        usage(argv[0]);
        exit(1);
    }

    /* configfile */
    if (!strcmp(argv[1], "-c")) {
        configfile = argv[2];
    } else {
        usage(argv[0]);
        exit(1);
    }

    /* load weight file ? */
    if (argc > 4) {
        if (!strcmp(argv[3], "-w")) {
            load = 1;
            weightfile2load = argv[4];
            if (!weightfile2load) {
                usage(argv[0]);
                exit(1);
            }
        } else {
            usage(argv[0]);
            exit(1);
        }
    }

    /* test? */
    if (argc > 5) {
        if (!strcmp(argv[5], "-test")) {
            test = 1;
        } else {
            usage(argv[0]);
            exit(1);
        }
    }

    /* read in parameters */
    getpars(configfile);

    /* allocate memory */
    alloc();

    /* read in sequences */
    printf("data..\n\n");
    int offset;
    offset = 0;
    if (!test) {
        negativetrain = get_data_sets("training", 0, fasta_desc, tar, aaSym, offset, length, traindata_negative);
        offset = negativetrain;
        // clone positive sequences for balancing out datasets
        while (positivetrain < negativetrain / 5) {
            offset += get_data_sets("training", 1, fasta_desc, tar, aaSym, offset, length, traindata_positive);
            positivetrain = offset - negativetrain;
        }
        // clone negative sequences for balancing out datasets
        while (negativetrain < positivetrain / 5) {
            offset += get_data_sets("training", 0, fasta_desc, tar, aaSym, offset, length, traindata_negative);
            negativetrain = offset - positivetrain;
        }
    }
    // test -> no training sequences
    else {
        negativetrain = 0;
        positivetrain = 0;
    }

    positivetest = get_data_sets("test", 1, fasta_desc_t, tar_t, aaSym_t, 0, length_t, testdata_positive);
    negativetest = get_data_sets("test", 0, fasta_desc_t, tar_t, aaSym_t, positivetest, length_t, testdata_negative);

    training_size = positivetrain + negativetrain;
    if (!test) {
        fprintf(stderr, "Traintargets: positive %d %.2f%% negative %d\n", positivetrain,
                (double)positivetrain / ((double)negativetrain + (double)positivetrain) * 100, negativetrain);
        fprintf(stderr, "Trainsize: %d\n\n", training_size);
    }

    test_size = positivetest + negativetest;
    fprintf(stderr, "Testtargets: positive %d %.2f%% negative %d\n", positivetest,
            (double)positivetest / ((double)negativetest + (double)positivetest) * 100, negativetest);
    fprintf(stderr, "Testsize: %d\n\n", test_size);

    /* memory for position codes */
    alloc2(training_size, test_size);

    /* half of the window */
    wside = (windowsize - 1) / 2;
    /* number of inputs */
    in_mod = AAS * windowsize - 1;
    /* bias unit */
    in_mod_b = in_mod + 1;
    /* number of inputs for local coding */
    in_nn_mod = windowsize;
    out_mod = OUT_UNITS;

    cell_mod = in_mod_b;
    for (i = 0; i < num_blocks; i++) {
        cell_mod += (2 + block_size[i]);
    }
    cell_mod++;
    ges_mod = cell_mod + out_mod;

    alloc3(ges_mod + 1);
    allocarrs(training_size, test_size);

    //fprintf(stderr, "in_mod: %d in_mod_b: %d in_nn_mod: %d num_blocks: %d \n", in_mod, in_mod_b, in_nn_mod, num_blocks);
    //fprintf(stderr, "cell_mod: %d out_mod: %d ges_mod: %d\n", cell_mod, out_mod, ges_mod);
    fprintf(stderr, "memory cells: %d\n", num_blocks);
    fprintf(stderr, "windowsize: %d\n", windowsize);

    /* initialization of random generator */
    if (ran_sta) {
        srandom(ran_sta);
        printf("random seed: %d\n", ran_sta);
    } else {
        srandom(time(NULL));
        printf("random seed: time\n");
    }

    initia(load, weightfile2load);
    //printf("init\n");

    stop_learn = 0;
    learn = 1;
    prot_current_idx = 0;

    /* test */
    if (test) {
        set_sequence(inp_t, inp_idx_t, aaSym_t, length_t[0], 0);
        nettest();
        exit(0);
    }

    /* prepare first trainingdata */
    shuffle(prand, training_size);
    set_sequence(inp, inp_idx, aaSym, length[prand[0]], prand[0]);

    class_err = 0;
    classnon_err = 0;
    seq_err = 0;
    epoch_err = 0;
    example = 0;
    element = 0;

    fprintf(stderr, "learning rate: %f\n", alpha);
    fprintf(stderr, "testing every %d epochs\n", test_aus);
    fprintf(stderr, "stopping training after %d epochs\n\n", maxepoch);
    fprintf(stderr, "running...\n");

    while (learn == 1) {
        /* executing the environment and setting the input */
        execute_act();

        /* forward pass */
        forward_pass(1, element, inp_idx, inp);

        if (targ) /* only if target for this input */
        {
            /* compute error */
            for (k = cell_mod, j = 0; k < ges_mod; k++, j++) {
                error[0] = target - Yk_new[k];
            }

            /* error */
            if (target > targetvalue1) {
                ispos = 1;
            } else {
                ispos = 0;
            }

            /* Training error */
            comp_err(Yk_new[cell_mod], &sptrain[prot_current_idx]);

            /* backward pass */
            backward_pass();

            /* counting the number of sequences in a epoch */
            example++;
        } else {
            derivatives();
        }

        /* time forward */
        for (i = 0; i < ges_mod; i++) {
            Yk_old[i] = Yk_new[i];
        }

        /* update weights */
        if (weight_up == 1) {
            weight_up = 0;
            weight_update();
        }

        element++;

        if (epoch > maxepoch - 1) {
            weight_out(W);
            reset_net();
            prot_current_idx = 0;
            set_sequence(inp_t, inp_idx_t, aaSym_t, length_t[0], 0);
            nettest();
            exit(0);
        }
    }

    weight_out(W);
    prot_current_idx = 0;
    set_sequence(inp_t, inp_idx_t, aaSym_t, length_t[0], 0);
    reset_net();
    nettest();

    shuffle(prand, training_size);
    prot_current_idx = 0;
    element = 0;
    set_sequence(inp, inp_idx, aaSym, length[prand[prot_current_idx]], prand[prot_current_idx]);
    reset_net();

    return 0;
}
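For orientation, the part of main() that computes in_mod, cell_mod and ges_mod fixes the index layout of the network: the window inputs plus a bias unit come first, then the memory-block units, then the output units starting at index cell_mod (which is why the training loop reads Yk_new[cell_mod]). The sketch below replays that arithmetic in isolation. Every concrete value in it (AAS, OUT_UNITS, windowsize, num_blocks, block_size) is a hypothetical placeholder; in the real program these come from the project headers and the config file read by getpars().

/* Standalone sketch of the network-sizing arithmetic from main().
 * The formulas mirror the listing above; the concrete values used here
 * are assumed examples, not taken from the project's headers or config. */
#include <stdio.h>

#define AAS 20        /* assumed: number of amino-acid symbols */
#define OUT_UNITS 1   /* assumed: one output unit */

int main(void)
{
    int windowsize   = 19;                 /* assumed sliding-window width */
    int block_size[] = { 2, 2, 2, 2, 2 };  /* assumed cells per memory block */
    int num_blocks   = 5;

    int wside    = (windowsize - 1) / 2;   /* half of the window */
    int in_mod   = AAS * windowsize - 1;   /* number of inputs */
    int in_mod_b = in_mod + 1;             /* plus bias unit */

    /* units occupied by the memory blocks; the +2 per block matches the
       formula in main() and presumably covers the block's gate units */
    int cell_mod = in_mod_b;
    for (int i = 0; i < num_blocks; i++)
        cell_mod += 2 + block_size[i];
    cell_mod++;                            /* extra unit, as in the listing */

    int ges_mod = cell_mod + OUT_UNITS;    /* total number of units */

    printf("wside=%d in_mod=%d in_mod_b=%d cell_mod=%d ges_mod=%d\n",
           wside, in_mod, in_mod_b, cell_mod, ges_mod);
    return 0;
}

With these placeholder values the sketch prints wside=9 in_mod=379 in_mod_b=380 cell_mod=401 ges_mod=402; the output layer then occupies indices cell_mod..ges_mod-1, matching the error loop over k in the training section above.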