PROGRAM LIST. 1* Backpropagation Neuro-Control*1 #include <stdio.h> #include <math.h> #include <string.h> #include <stdlib.h>
|
|
- Nathaniel Roberts
- 5 years ago
- Views:
Transcription
/**********************************************************************
 * Backpropagation Neuro-Control
 *
 * Trains a neural network with one hidden layer to learn the inverse
 * dynamics of a plant.  Once trained, the network can be used as a
 * direct controller for the plant.  The plant input-output
 * characteristics must first be generated and kept in a file called
 * "in-out.dat".  (Original ran under Borland C++ 3.1.)
 *
 * NOTE(review): reconstructed from an OCR-garbled scan.  Comment
 * delimiters ('1*' -> '/'+'*'), digits ('o' -> '0'), format specifiers
 * ('%t' -> '%f') and identifiers were repaired; control flow follows
 * the scanned listing.  The numeric values of Eta, Alpha, Beta,
 * ErrorFunc, Wmin and Wmax were lost in the scan -- the values below
 * are conventional placeholders; TODO confirm against the printed book.
 **********************************************************************/
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <stdlib.h>

/* Network topology and pattern count */
#define No_of_Input_Units   3    /* no. of nodes of input layer  (i) */
#define No_of_Hidden_Units  15   /* no. of nodes of hidden layer (j) */
#define No_of_Output_Units  1    /* no. of nodes of output layer (k) */
#define No_of_Patterns      8    /* total no. of training patterns (p) */

/* Learning parameters -- values lost in the scan; TODO confirm */
#define Eta        0.5f          /* learning rate */
#define Alpha      0.1f          /* momentum constant */
#define Beta       0.05f         /* acceleration constant */
#define ErrorFunc  0.001f        /* maximum error allowed (stop criterion) */
#define Wmin      (-0.5f)        /* lower bound for random initial weights */
#define Wmax      ( 0.5f)        /* upper bound for random initial weights */
#define No_of_Data 101           /* total number of plant input-output data */

/* Sigmoid activation function, where x = net + bias */
#define f(x)   (1.0f / (1.0f + exp(-(x))))
/* Uniform random value in [Wmin, Wmax]; 0x7fff was RAND_MAX on Borland */
#define rnd()  ((float)rand() / 0x7fff * (Wmax - Wmin) + Wmin)

/* Layer outputs, targets, weights, weight changes and biases.
 * The trailing [3] dimension of each "d..." array holds the current,
 * previous and second-previous change (momentum + acceleration terms). */
float o1[No_of_Patterns][No_of_Input_Units];    /* output of input layer i  */
float o2[No_of_Hidden_Units];                   /* output of hidden layer j */
float o3[No_of_Output_Units];                   /* output of output layer k */
float t[No_of_Patterns][No_of_Output_Units];    /* desired (target) outputs */
float w21[No_of_Hidden_Units][No_of_Input_Units];      /* weights i -> j */
float dw21[No_of_Hidden_Units][No_of_Input_Units][3];  /* change in w21  */
float w32[No_of_Output_Units][No_of_Hidden_Units];     /* weights j -> k */
float dw32[No_of_Output_Units][No_of_Hidden_Units][3]; /* change in w32  */
float bias2[No_of_Hidden_Units];      /* bias to nodes in layer j (theta j) */
float dbias2[No_of_Hidden_Units][3];  /* change in bias2 */
float bias3[No_of_Output_Units];      /* bias to nodes in layer k (theta k) */
float dbias3[No_of_Output_Units][3];  /* change in bias3 */
/* "erorfunc" (sic, spelling kept from the listing) mirrors the last
 * computed error value; er[] keeps the two most recent errors. */
float increase, erorfunc, er[3], u[No_of_Data], y[No_of_Data];
int count, iterations;

/* Forward declarations (listing declared these inside main) */
void propagation(int p);
void back_propagation(int p);
void forward(int p);
void read_input_output_data(void);
void initialize(void);
void save_weight(void);
void NN_inputs(void);

/* Main program: initialize, load plant data, build the training
 * patterns, then train until the squared error falls below ErrorFunc
 * (or stops decreasing), and finally save the adapted weights. */
int main(void)                      /* was nonstandard "void main()" */
{
    unsigned long i, j, k;
    static float errorfunc;

    initialize();
    read_input_output_data();
    /* NOTE(review): without this call t[][] and o1[][] stay zero; the
     * call was presumably lost in the scan -- restored here. */
    NN_inputs();

    increase = 0.0f;
    /* Initial error over all patterns: sum of squared (target - output) */
    for (errorfunc = 0.0f, i = 0; i < No_of_Patterns; i++) {
        forward(i);
        for (j = 0; j < No_of_Output_Units; j++)
            errorfunc += pow(t[i][j] - o3[j], 2.0);
    }
    printf("Error Function is: %.5f\n", errorfunc);
    errorfunc /= 2;                 /* E = (1/2) * sum of squared errors */
    erorfunc = errorfunc;
    printf("Error Value is : %.3f\n", errorfunc);
    er[0] = errorfunc;

    /* Training loop: one backprop sweep per epoch, then re-evaluate */
    for (i = 0; errorfunc > ErrorFunc; ) {
        for (j = 0; j < No_of_Patterns; j++) {
            propagation(j);
            back_propagation(j);
        }
        for (errorfunc = 0.0f, j = 0; j < No_of_Patterns; j++) {
            forward(j);
            for (k = 0; k < No_of_Output_Units; k++)
                errorfunc += pow(t[j][k] - o3[k], 2.0);
        }
        iterations = 1;
        ++i;
        increase += (float)iterations;  /* epoch counter as a float */
        errorfunc /= 2;
        erorfunc = errorfunc;
        printf(" I= %f ", increase);
        printf("e = %.7f\n", errorfunc);
        er[1] = er[0];
        er[0] = errorfunc;
        /* Stop early if the error has increased since the last epoch */
        if (er[1] < er[0])
            break;
    }

    printf("\n After Learning \n");
    for (i = 0; i < No_of_Patterns; i++)
        forward(i);
    save_weight();                  /* save the adapted weights */
    return 0;
}

/* Save the adapted weights (and biases) to "weight.dat", one value
 * per line: first the hidden layer (w21 rows then bias2), then the
 * output layer (w32 rows then bias3). */
void save_weight(void)
{
    int i, j;
    FILE *fp = fopen("weight.dat", "w");
    if (fp == NULL) {               /* added: unchecked fopen in scan */
        perror("weight.dat");
        return;
    }
    /* weights into layer j */
    for (i = 0; i < No_of_Hidden_Units; i++) {
        for (j = 0; j < No_of_Input_Units; j++)
            fprintf(fp, "%f\n", w21[i][j]);
        fprintf(fp, "%f\n", bias2[i]);
    }
    /* weights into layer k */
    for (i = 0; i < No_of_Output_Units; i++) {
        for (j = 0; j < No_of_Hidden_Units; j++)
            fprintf(fp, "%f\n", w32[i][j]);
        fprintf(fp, "%f\n", bias3[i]);
    }
    fclose(fp);
}

/* Procedure Propagation: propagate pattern p forward through the
 * network.  Hidden layer uses the sigmoid; the output layer is
 * linear (not bounded by the sigmoid), as in the listing. */
void propagation(int p)
{
    int i, j;
    float net;

    for (i = 0; i < No_of_Hidden_Units; i++) {          /* layer j */
        for (net = 0.0f, j = 0; j < No_of_Input_Units; j++)
            net += w21[i][j] * o1[p][j];
        o2[i] = f(net + bias2[i]);  /* sigmoid function */
    }
    for (i = 0; i < No_of_Output_Units; i++) {          /* layer k */
        for (net = 0.0f, j = 0; j < No_of_Hidden_Units; j++)
            net += w32[i][j] * o2[j];
        o3[i] = (net + bias3[i]);   /* linear output node */
    }
}

/* Procedure Back Propagation: backpropagate the error of pattern p
 * and update all weights and biases (with momentum Alpha and
 * acceleration Beta terms kept in the [1] and [2] history slots). */
void back_propagation(int p)
{
    int i, j;
    float d2[No_of_Hidden_Units];   /* delta j */
    float d3[No_of_Output_Units];   /* delta k */
    float sum;                      /* accumulates delta_k * w_kj */

    /* delta k (linear output node, so no derivative factor) */
    for (i = 0; i < No_of_Output_Units; i++)
        d3[i] = (t[p][i] - o3[i]);

    /* update weights j -> k and accumulate delta j */
    for (i = 0; i < No_of_Hidden_Units; i++) {
        for (sum = 0.0f, j = 0; j < No_of_Output_Units; j++) {
            dw32[j][i][0] = Eta * d3[j] * o2[i]
                          + Alpha * dw32[j][i][1]
                          + Beta  * dw32[j][i][2];
            w32[j][i] += dw32[j][i][0];
            sum += d3[j] * w32[j][i];
            dw32[j][i][2] = dw32[j][i][1];  /* shift change history */
            dw32[j][i][1] = dw32[j][i][0];
        }
        /* delta j uses the sigmoid derivative o2*(1-o2) */
        d2[i] = o2[i] * (1 - o2[i]) * sum;
    }

    /* update bias k */
    for (i = 0; i < No_of_Output_Units; i++) {
        dbias3[i][0] = Eta * d3[i]
                     + Alpha * dbias3[i][1]
                     + Beta  * dbias3[i][2];
        bias3[i] += dbias3[i][0];
        dbias3[i][2] = dbias3[i][1];
        dbias3[i][1] = dbias3[i][0];
    }

    /* update weights i -> j */
    for (i = 0; i < No_of_Input_Units; i++)
        for (j = 0; j < No_of_Hidden_Units; j++) {
            dw21[j][i][0] = Eta * d2[j] * o1[p][i]
                          + Alpha * dw21[j][i][1]
                          + Beta  * dw21[j][i][2];
            w21[j][i] += dw21[j][i][0];
            dw21[j][i][2] = dw21[j][i][1];
            dw21[j][i][1] = dw21[j][i][0];
        }

    /* update bias j */
    for (i = 0; i < No_of_Hidden_Units; i++) {
        dbias2[i][0] = Eta * d2[i]
                     + Alpha * dbias2[i][1]
                     + Beta  * dbias2[i][2];
        bias2[i] += dbias2[i][0];
        dbias2[i][2] = dbias2[i][1];
        dbias2[i][1] = dbias2[i][0];
    }
}

/* Read the plant input-output characteristics from "in-out.dat"
 * (one "u y" pair per line) and echo them to stdout.  This file must
 * be generated beforehand. */
void read_input_output_data(void)
{
    int j;
    FILE *file = fopen("in-out.dat", "r");
    if (file == NULL) {             /* added: unchecked fopen in scan */
        perror("in-out.dat");
        exit(EXIT_FAILURE);         /* training is meaningless without data */
    }
    for (j = 0; j < No_of_Data; j++)
        fscanf(file, "%f %f\n", &u[j], &y[j]);
    for (j = 0; j < No_of_Data; j++)
        printf("u[%d]=%f y[%d]=%f\n", j, u[j], j, y[j]);
    fclose(file);
}

/* Build the NN training patterns from the plant data: target is the
 * plant input u, and the 3 NN inputs are the plant outputs y at three
 * consecutive time steps (inverse-model learning).
 * NOTE(review): the scan never shows i being advanced inside these
 * loops, so consecutive patterns all start at i=0; the increment (if
 * any) was lost -- TODO confirm the pattern-selection step against
 * the printed book. */
void NN_inputs(void)
{
    int i, j, k;

    for (i = 0, j = 0; j < No_of_Patterns; j++)
        for (k = 0; k < No_of_Output_Units; k++)
            t[j][k] = u[i + 2];
    for (j = 0; j < No_of_Patterns; j++)
        for (k = 0; k < No_of_Output_Units; k++)
            printf("t[%d][%d]=%f\n", j, k, t[j][k]);

    /* example for 3 NN input vectors */
    for (i = 0, j = 0; j < No_of_Patterns; j++) {
        o1[j][0] = y[i + 2];
        o1[j][1] = y[i + 1];
        o1[j][2] = y[i];
    }
    for (j = 0; j < No_of_Patterns; j++)
        for (k = 0; k < No_of_Input_Units; k++)
            printf("o1[%d][%d]=%f\n", j, k, o1[j][k]);
}

/* Procedure forward: propagate pattern p and print the network
 * outputs for that pattern. */
void forward(int p)
{
    int i;
    printf("\t%d ->", p + 1);
    propagation(p);
    for (i = 0; i < No_of_Output_Units; i++)
        printf(" o3[%d]=%2.5f\n", p + 1, o3[i]);
}

/* Procedure Initialize: randomly initialize all weights in
 * [Wmin, Wmax] and set every bias to 1.0. */
void initialize(void)
{
    int i, j;

    /* weights between layer i and j */
    for (i = 0; i < No_of_Hidden_Units; i++) {
        for (j = 0; j < No_of_Input_Units; j++)
            w21[i][j] = rnd();
        bias2[i] = 1.0f;
    }
    /* weights between layer j and k */
    for (i = 0; i < No_of_Output_Units; i++) {
        for (j = 0; j < No_of_Hidden_Units; j++)
            w32[i][j] = rnd();
        bias3[i] = 1.0f;
    }
}
8 INDEX adaptive control , 82 adaptive critic ARMA model associative memory auto-tuning... 30, 77 axon backpropagation algorithm ,87,90-92,94,96,99,103,112,116, 117, 124, 166, 174, 176 backpropagation-through-time... 94, 95, 161 Boltzmann machine CARMA model CARIMA model... 34, 63 CASSIS... 1,3 certainty equivalent competitive learning control horizon cybernetics dendrites... 7, 8 derivative gain derivative time... 35, 152,210,211 direct inverse control/model... 89, 91, 108, 114, , distributed memory... 2 disturbance... 30, 39, 40, 63 electric vehicle , ,239 extended least squares feedback-error learning... 92,93, furnace , fuzzy associative memories fuzzy logic control , , fuzzy sets Gaussian noise generalized learning... 91, 92
9 254 Neuro-Control and Its Applications generalized minimum variance control generalized predictive control , Hopfield network INSPEC... 1,2 integral gain integral time , 187,210,211,214,215 intelligent control... 5 intelligent systems... 1, 2 inverted pendulum , Kohonen feature map learning machine... 12,13 learning rate... 17, 21, 24, 25,129, 142, 163, 179,213,225 least squares estimation , 56, 57, 66 load disturbances... 63, , 195, local minimum... 17,22-23 long range prediction , 79 maximum costing McCulloch-Pitts neuron membership functions... 69,70,71-73, 184, 185, 194, 199 minimum costing minimal order observer minimum variance control model reduction momentum term... 21, 24, 25, 129, 136,225 multivariable self-tuning PID control neuro-emulator ,95, , 112, 116, 136, 157, , 179,226 nonlinear observer , non-minimum phase... 38, 39 on-line learning , , , optimal control optimum prediction... 41, 54 overfitting parallel architecture... 2 parallel neuro-control... 95, parameter estimation ,37,56-57, 78 parameterization parameter variations , 191, 221 perceptron... 13, 14 PI, PID control , 29-31, 34-36, 86, 152, , ,221, pole-placement , 77
10 Index 255 predicted output error proportional gain Purkinje cell... 7,8 receding horizon control... 33,62 saturation... 35, 43 self-tuning control... 31,32,36-40 self-tuning neuro-control... 95, self-tuning PI/PID control... 32, self-tuning PID-type neuro-control , 208, series neuro-control... 95, sigmoid... 13,23,88,103, 112, 129, 137, 194,218 specialized inverse mapping/learning... 92, 99, 115 stability... 32,39,62,63, 172 steady-state error... 32,44 step-wise disturbance steepest descent , 100, 112, 114, 136, 140, 154 soma... 7 synapse... 7, 8 supervised control... 89,90 time-delay ,47,62-63,96,97,103,105,108,136,174, 177, ,237 UD factorization Von Neumann computers... 1 water bath , 130, , ,239
Supervised Learning in Neural Networks (Part 2)
Supervised Learning in Neural Networks (Part 2) Multilayer neural networks (back-propagation training algorithm) The input signals are propagated in a forward direction on a layer-bylayer basis. Learning
More information11/14/2010 Intelligent Systems and Soft Computing 1
Lecture 7 Artificial neural networks: Supervised learning Introduction, or how the brain works The neuron as a simple computing element The perceptron Multilayer neural networks Accelerated learning in
More informationNeural Networks. CE-725: Statistical Pattern Recognition Sharif University of Technology Spring Soleymani
Neural Networks CE-725: Statistical Pattern Recognition Sharif University of Technology Spring 2013 Soleymani Outline Biological and artificial neural networks Feed-forward neural networks Single layer
More informationMultilayer Feed-forward networks
Multi Feed-forward networks 1. Computational models of McCulloch and Pitts proposed a binary threshold unit as a computational model for artificial neuron. This first type of neuron has been generalized
More informationLECTURE NOTES Professor Anita Wasilewska NEURAL NETWORKS
LECTURE NOTES Professor Anita Wasilewska NEURAL NETWORKS Neural Networks Classifier Introduction INPUT: classification data, i.e. it contains an classification (class) attribute. WE also say that the class
More informationData Mining. Neural Networks
Data Mining Neural Networks Goals for this Unit Basic understanding of Neural Networks and how they work Ability to use Neural Networks to solve real problems Understand when neural networks may be most
More informationNeural Networks CMSC475/675
Introduction to Neural Networks CMSC475/675 Chapter 1 Introduction Why ANN Introduction Some tasks can be done easily (effortlessly) by humans but are hard by conventional paradigms on Von Neumann machine
More informationCOMPUTATIONAL INTELLIGENCE
COMPUTATIONAL INTELLIGENCE Fundamentals Adrian Horzyk Preface Before we can proceed to discuss specific complex methods we have to introduce basic concepts, principles, and models of computational intelligence
More informationLearning. Learning agents Inductive learning. Neural Networks. Different Learning Scenarios Evaluation
Learning Learning agents Inductive learning Different Learning Scenarios Evaluation Slides based on Slides by Russell/Norvig, Ronald Williams, and Torsten Reil Material from Russell & Norvig, chapters
More informationNeuro-Fuzzy Inverse Forward Models
CS9 Autumn Neuro-Fuzzy Inverse Forward Models Brian Highfill Stanford University Department of Computer Science Abstract- Internal cognitive models are useful methods for the implementation of motor control
More informationAssignment # 5. Farrukh Jabeen Due Date: November 2, Neural Networks: Backpropation
Farrukh Jabeen Due Date: November 2, 2009. Neural Networks: Backpropation Assignment # 5 The "Backpropagation" method is one of the most popular methods of "learning" by a neural network. Read the class
More informationCS6220: DATA MINING TECHNIQUES
CS6220: DATA MINING TECHNIQUES Image Data: Classification via Neural Networks Instructor: Yizhou Sun yzsun@ccs.neu.edu November 19, 2015 Methods to Learn Classification Clustering Frequent Pattern Mining
More informationNeural Networks. Neural Network. Neural Network. Neural Network 2/21/2008. Andrew Kusiak. Intelligent Systems Laboratory Seamans Center
Neural Networks Neural Network Input Andrew Kusiak Intelligent t Systems Laboratory 2139 Seamans Center Iowa City, IA 52242-1527 andrew-kusiak@uiowa.edu http://www.icaen.uiowa.edu/~ankusiak Tel. 319-335
More informationDeep Learning. Vladimir Golkov Technical University of Munich Computer Vision Group
Deep Learning Vladimir Golkov Technical University of Munich Computer Vision Group 1D Input, 1D Output target input 2 2D Input, 1D Output: Data Distribution Complexity Imagine many dimensions (data occupies
More informationDr. Qadri Hamarsheh Supervised Learning in Neural Networks (Part 1) learning algorithm Δwkj wkj Theoretically practically
Supervised Learning in Neural Networks (Part 1) A prescribed set of well-defined rules for the solution of a learning problem is called a learning algorithm. Variety of learning algorithms are existing,
More informationMulti-layer Perceptron Forward Pass Backpropagation. Lecture 11: Aykut Erdem November 2016 Hacettepe University
Multi-layer Perceptron Forward Pass Backpropagation Lecture 11: Aykut Erdem November 2016 Hacettepe University Administrative Assignment 2 due Nov. 10, 2016! Midterm exam on Monday, Nov. 14, 2016 You are
More informationModel learning for robot control: a survey
Model learning for robot control: a survey Duy Nguyen-Tuong, Jan Peters 2011 Presented by Evan Beachly 1 Motivation Robots that can learn how their motors move their body Complexity Unanticipated Environments
More informationNeural Networks in Statistica
http://usnet.us.edu.pl/uslugi-sieciowe/oprogramowanie-w-usk-usnet/oprogramowaniestatystyczne/ Neural Networks in Statistica Agnieszka Nowak - Brzezińska The basic element of each neural network is neuron.
More informationINTELLIGENT SEISMIC STRUCTURAL HEALTH MONITORING SYSTEM FOR THE SECOND PENANG BRIDGE OF MALAYSIA
INTELLIGENT SEISMIC STRUCTURAL HEALTH MONITORING SYSTEM FOR THE SECOND PENANG BRIDGE OF MALAYSIA Reni Suryanita Faculty of Engineering Civil Engineering Department University of Riau, Pekanbaru reni.suryanita@lecturer.unri.ac.id
More informationNatural Language Processing CS 6320 Lecture 6 Neural Language Models. Instructor: Sanda Harabagiu
Natural Language Processing CS 6320 Lecture 6 Neural Language Models Instructor: Sanda Harabagiu In this lecture We shall cover: Deep Neural Models for Natural Language Processing Introduce Feed Forward
More informationIntroduction to Neural Networks
Introduction to Neural Networks What are connectionist neural networks? Connectionism refers to a computer modeling approach to computation that is loosely based upon the architecture of the brain Many
More informationCHAPTER IX Radial Basis Function Networks
CHAPTER IX Radial Basis Function Networks Radial basis function (RBF) networks are feed-forward networks trained using a supervised training algorithm. They are typically configured with a single hidden
More informationNeural Networks (Overview) Prof. Richard Zanibbi
Neural Networks (Overview) Prof. Richard Zanibbi Inspired by Biology Introduction But as used in pattern recognition research, have little relation with real neural systems (studied in neurology and neuroscience)
More informationLecture 20: Neural Networks for NLP. Zubin Pahuja
Lecture 20: Neural Networks for NLP Zubin Pahuja zpahuja2@illinois.edu courses.engr.illinois.edu/cs447 CS447: Natural Language Processing 1 Today s Lecture Feed-forward neural networks as classifiers simple
More informationPerceptrons and Backpropagation. Fabio Zachert Cognitive Modelling WiSe 2014/15
Perceptrons and Backpropagation Fabio Zachert Cognitive Modelling WiSe 2014/15 Content History Mathematical View of Perceptrons Network Structures Gradient Descent Backpropagation (Single-Layer-, Multilayer-Networks)
More information11/14/2010 Intelligent Systems and Soft Computing 1
Lecture 8 Artificial neural networks: Unsupervised learning Introduction Hebbian learning Generalised Hebbian learning algorithm Competitive learning Self-organising computational map: Kohonen network
More informationNeural Network Neurons
Neural Networks Neural Network Neurons 1 Receives n inputs (plus a bias term) Multiplies each input by its weight Applies activation function to the sum of results Outputs result Activation Functions Given
More informationNeural Network Learning. Today s Lecture. Continuation of Neural Networks. Artificial Neural Networks. Lecture 24: Learning 3. Victor R.
Lecture 24: Learning 3 Victor R. Lesser CMPSCI 683 Fall 2010 Today s Lecture Continuation of Neural Networks Artificial Neural Networks Compose of nodes/units connected by links Each link has a numeric
More informationPattern Classification Algorithms for Face Recognition
Chapter 7 Pattern Classification Algorithms for Face Recognition 7.1 Introduction The best pattern recognizers in most instances are human beings. Yet we do not completely understand how the brain recognize
More informationCHAPTER VI BACK PROPAGATION ALGORITHM
6.1 Introduction CHAPTER VI BACK PROPAGATION ALGORITHM In the previous chapter, we analysed that multiple layer perceptrons are effectively applied to handle tricky problems if trained with a vastly accepted
More informationCMPT 882 Week 3 Summary
CMPT 882 Week 3 Summary! Artificial Neural Networks (ANNs) are networks of interconnected simple units that are based on a greatly simplified model of the brain. ANNs are useful learning tools by being
More informationMachine Learning. Deep Learning. Eric Xing (and Pengtao Xie) , Fall Lecture 8, October 6, Eric CMU,
Machine Learning 10-701, Fall 2015 Deep Learning Eric Xing (and Pengtao Xie) Lecture 8, October 6, 2015 Eric Xing @ CMU, 2015 1 A perennial challenge in computer vision: feature engineering SIFT Spin image
More informationClassification Lecture Notes cse352. Neural Networks. Professor Anita Wasilewska
Classification Lecture Notes cse352 Neural Networks Professor Anita Wasilewska Neural Networks Classification Introduction INPUT: classification data, i.e. it contains an classification (class) attribute
More informationCS 4510/9010 Applied Machine Learning. Neural Nets. Paula Matuszek Fall copyright Paula Matuszek 2016
CS 4510/9010 Applied Machine Learning 1 Neural Nets Paula Matuszek Fall 2016 Neural Nets, the very short version 2 A neural net consists of layers of nodes, or neurons, each of which has an activation
More informationCL7204-SOFT COMPUTING TECHNIQUES
VALLIAMMAI ENGINEERING COLLEGE 2015-2016(EVEN) [DOCUMENT TITLE] CL7204-SOFT COMPUTING TECHNIQUES UNIT I Prepared b Ms. Z. Jenifer A. P(O.G) QUESTION BANK INTRODUCTION AND NEURAL NETWORKS 1. What is soft
More informationNeural Nets. General Model Building
Neural Nets To give you an idea of how new this material is, let s do a little history lesson. The origins of neural nets are typically dated back to the early 1940 s and work by two physiologists, McCulloch
More informationIMPROVEMENTS TO THE BACKPROPAGATION ALGORITHM
Annals of the University of Petroşani, Economics, 12(4), 2012, 185-192 185 IMPROVEMENTS TO THE BACKPROPAGATION ALGORITHM MIRCEA PETRINI * ABSTACT: This paper presents some simple techniques to improve
More informationMachine Learning in Biology
Università degli studi di Padova Machine Learning in Biology Luca Silvestrin (Dottorando, XXIII ciclo) Supervised learning Contents Class-conditional probability density Linear and quadratic discriminant
More informationFor Monday. Read chapter 18, sections Homework:
For Monday Read chapter 18, sections 10-12 The material in section 8 and 9 is interesting, but we won t take time to cover it this semester Homework: Chapter 18, exercise 25 a-b Program 4 Model Neuron
More informationFast Learning for Big Data Using Dynamic Function
IOP Conference Series: Materials Science and Engineering PAPER OPEN ACCESS Fast Learning for Big Data Using Dynamic Function To cite this article: T Alwajeeh et al 2017 IOP Conf. Ser.: Mater. Sci. Eng.
More informationArtificial neural networks are the paradigm of connectionist systems (connectionism vs. symbolism)
Artificial Neural Networks Analogy to biological neural systems, the most robust learning systems we know. Attempt to: Understand natural biological systems through computational modeling. Model intelligent
More informationReview: Final Exam CPSC Artificial Intelligence Michael M. Richter
Review: Final Exam Model for a Learning Step Learner initially Environm ent Teacher Compare s pe c ia l Information Control Correct Learning criteria Feedback changed Learner after Learning Learning by
More informationArtificial Neural Network Methodology for Modelling and Forecasting Maize Crop Yield
Agricultural Economics Research Review Vol. 21 January-June 2008 pp 5-10 Artificial Neural Network Methodology for Modelling and Forecasting Maize Crop Yield Rama Krishna Singh and Prajneshu * Biometrics
More informationLinear Separability. Linear Separability. Capabilities of Threshold Neurons. Capabilities of Threshold Neurons. Capabilities of Threshold Neurons
Linear Separability Input space in the two-dimensional case (n = ): - - - - - - w =, w =, = - - - - - - w = -, w =, = - - - - - - w = -, w =, = Linear Separability So by varying the weights and the threshold,
More informationMini-project 2 CMPSCI 689 Spring 2015 Due: Tuesday, April 07, in class
Mini-project 2 CMPSCI 689 Spring 2015 Due: Tuesday, April 07, in class Guidelines Submission. Submit a hardcopy of the report containing all the figures and printouts of code in class. For readability
More informationUnderstanding Andrew Ng s Machine Learning Course Notes and codes (Matlab version)
Understanding Andrew Ng s Machine Learning Course Notes and codes (Matlab version) Note: All source materials and diagrams are taken from the Coursera s lectures created by Dr Andrew Ng. Everything I have
More informationArtificial Neural Networks. Introduction to Computational Neuroscience Ardi Tampuu
Artificial Neural Networks Introduction to Computational Neuroscience Ardi Tampuu 7.0.206 Artificial neural network NB! Inspired by biology, not based on biology! Applications Automatic speech recognition
More informationYuki Osada Andrew Cannon
Yuki Osada Andrew Cannon 1 Humans are an intelligent species One feature is the ability to learn The ability to learn comes down to the brain The brain learns from experience Research shows that the brain
More information4.12 Generalization. In back-propagation learning, as many training examples as possible are typically used.
1 4.12 Generalization In back-propagation learning, as many training examples as possible are typically used. It is hoped that the network so designed generalizes well. A network generalizes well when
More informationCHAPTER 6 COUNTER PROPAGATION NEURAL NETWORK FOR IMAGE RESTORATION
135 CHAPTER 6 COUNTER PROPAGATION NEURAL NETWORK FOR IMAGE RESTORATION 6.1 INTRODUCTION Neural networks have high fault tolerance and potential for adaptive training. A Full Counter Propagation Neural
More informationCOMPUTATIONAL INTELLIGENCE
COMPUTATIONAL INTELLIGENCE Radial Basis Function Networks Adrian Horzyk Preface Radial Basis Function Networks (RBFN) are a kind of artificial neural networks that use radial basis functions (RBF) as activation
More informationA Novel Pruning Algorithm for Optimizing Feedforward Neural Network of Classification Problems
Chapter 5 A Novel Pruning Algorithm for Optimizing Feedforward Neural Network of Classification Problems 5.1 Introduction Many researchers have proposed pruning algorithms in numerous ways to optimize
More informationCS 6501: Deep Learning for Computer Graphics. Training Neural Networks II. Connelly Barnes
CS 6501: Deep Learning for Computer Graphics Training Neural Networks II Connelly Barnes Overview Preprocessing Initialization Vanishing/exploding gradients problem Batch normalization Dropout Additional
More informationKnowledge Discovery and Data Mining. Neural Nets. A simple NN as a Mathematical Formula. Notes. Lecture 13 - Neural Nets. Tom Kelsey.
Knowledge Discovery and Data Mining Lecture 13 - Neural Nets Tom Kelsey School of Computer Science University of St Andrews http://tom.home.cs.st-andrews.ac.uk twk@st-andrews.ac.uk Tom Kelsey ID5059-13-NN
More informationKnowledge Discovery and Data Mining
Knowledge Discovery and Data Mining Lecture 13 - Neural Nets Tom Kelsey School of Computer Science University of St Andrews http://tom.home.cs.st-andrews.ac.uk twk@st-andrews.ac.uk Tom Kelsey ID5059-13-NN
More informationArtificial Intellegence
Artificial Intellegence Neural Net: Based on Nature Perceptron Variations Perceptrons: A Basic Neural Net In machine learning, the perceptron is an algorithm for supervised classification of an input into
More informationNetwork Traffic Measurements and Analysis
DEIB - Politecnico di Milano Fall, 2017 Sources Hastie, Tibshirani, Friedman: The Elements of Statistical Learning James, Witten, Hastie, Tibshirani: An Introduction to Statistical Learning Andrew Ng:
More informationMULTILAYER PERCEPTRON WITH ADAPTIVE ACTIVATION FUNCTIONS CHINMAY RANE. Presented to the Faculty of Graduate School of
MULTILAYER PERCEPTRON WITH ADAPTIVE ACTIVATION FUNCTIONS By CHINMAY RANE Presented to the Faculty of Graduate School of The University of Texas at Arlington in Partial Fulfillment of the Requirements for
More informationCOMP 551 Applied Machine Learning Lecture 14: Neural Networks
COMP 551 Applied Machine Learning Lecture 14: Neural Networks Instructor: (jpineau@cs.mcgill.ca) Class web page: www.cs.mcgill.ca/~jpineau/comp551 Unless otherwise noted, all material posted for this course
More informationMachine Learning Classifiers and Boosting
Machine Learning Classifiers and Boosting Reading Ch 18.6-18.12, 20.1-20.3.2 Outline Different types of learning problems Different types of learning algorithms Supervised learning Decision trees Naïve
More informationCHAPTER 7 MASS LOSS PREDICTION USING ARTIFICIAL NEURAL NETWORK (ANN)
128 CHAPTER 7 MASS LOSS PREDICTION USING ARTIFICIAL NEURAL NETWORK (ANN) Various mathematical techniques like regression analysis and software tools have helped to develop a model using equation, which
More informationCLASSIFICATION WITH RADIAL BASIS AND PROBABILISTIC NEURAL NETWORKS
CLASSIFICATION WITH RADIAL BASIS AND PROBABILISTIC NEURAL NETWORKS CHAPTER 4 CLASSIFICATION WITH RADIAL BASIS AND PROBABILISTIC NEURAL NETWORKS 4.1 Introduction Optical character recognition is one of
More information2. Neural network basics
2. Neural network basics Next commonalities among different neural networks are discussed in order to get started and show which structural parts or concepts appear in almost all networks. It is presented
More informationAPPLICATIONS OF INTELLIGENT HYBRID SYSTEMS IN MATLAB
APPLICATIONS OF INTELLIGENT HYBRID SYSTEMS IN MATLAB Z. Dideková, S. Kajan Institute of Control and Industrial Informatics, Faculty of Electrical Engineering and Information Technology, Slovak University
More informationSimple Model Selection Cross Validation Regularization Neural Networks
Neural Nets: Many possible refs e.g., Mitchell Chapter 4 Simple Model Selection Cross Validation Regularization Neural Networks Machine Learning 10701/15781 Carlos Guestrin Carnegie Mellon University February
More informationExercise: Training Simple MLP by Backpropagation. Using Netlab.
Exercise: Training Simple MLP by Backpropagation. Using Netlab. Petr Pošík December, 27 File list This document is an explanation text to the following script: demomlpklin.m script implementing the beckpropagation
More informationClimate Precipitation Prediction by Neural Network
Journal of Mathematics and System Science 5 (205) 207-23 doi: 0.7265/259-529/205.05.005 D DAVID PUBLISHING Juliana Aparecida Anochi, Haroldo Fraga de Campos Velho 2. Applied Computing Graduate Program,
More informationNeural network based Numerical digits Recognization using NNT in Matlab
Neural network based Numerical digits Recognization using NNT in Matlab ABSTRACT Amritpal kaur 1, Madhavi Arora 2 M.tech- ECE 1, Assistant Professor 2 Global institute of engineering and technology, Amritsar
More informationIMPLEMENTATION OF RBF TYPE NETWORKS BY SIGMOIDAL FEEDFORWARD NEURAL NETWORKS
IMPLEMENTATION OF RBF TYPE NETWORKS BY SIGMOIDAL FEEDFORWARD NEURAL NETWORKS BOGDAN M.WILAMOWSKI University of Wyoming RICHARD C. JAEGER Auburn University ABSTRACT: It is shown that by introducing special
More informationImageNet Classification with Deep Convolutional Neural Networks
ImageNet Classification with Deep Convolutional Neural Networks Alex Krizhevsky Ilya Sutskever Geoffrey Hinton University of Toronto Canada Paper with same name to appear in NIPS 2012 Main idea Architecture
More informationChapter 5 Neural Network Concepts and Paradigms
Chapter 5 Neural Network Concepts and Paradigms Chapter 5 Outline History Key elements and terminology Topologies Adaptation methods Recall dynamics Comparisons of neural and non-neural methods History
More informationNeural Networks. Robot Image Credit: Viktoriya Sukhanova 123RF.com
Neural Networks These slides were assembled by Eric Eaton, with grateful acknowledgement of the many others who made their course materials freely available online. Feel free to reuse or adapt these slides
More informationWeek 3: Perceptron and Multi-layer Perceptron
Week 3: Perceptron and Multi-layer Perceptron Phong Le, Willem Zuidema November 12, 2013 Last week we studied two famous biological neuron models, Fitzhugh-Nagumo model and Izhikevich model. This week,
More informationTransactions on Information and Communications Technologies vol 16, 1996 WIT Press, ISSN
Comparative study of fuzzy logic and neural network methods in modeling of simulated steady-state data M. Järvensivu and V. Kanninen Laboratory of Process Control, Department of Chemical Engineering, Helsinki
More informationAn Algorithm For Training Multilayer Perceptron (MLP) For Image Reconstruction Using Neural Network Without Overfitting.
An Algorithm For Training Multilayer Perceptron (MLP) For Image Reconstruction Using Neural Network Without Overfitting. Mohammad Mahmudul Alam Mia, Shovasis Kumar Biswas, Monalisa Chowdhury Urmi, Abubakar
More informationPerceptron as a graph
Neural Networks Machine Learning 10701/15781 Carlos Guestrin Carnegie Mellon University October 10 th, 2007 2005-2007 Carlos Guestrin 1 Perceptron as a graph 1 0.9 0.8 0.7 0.6 0.5 0.4 0.3 0.2 0.1 0-6 -4-2
More informationMachine Learning. The Breadth of ML Neural Networks & Deep Learning. Marc Toussaint. Duy Nguyen-Tuong. University of Stuttgart
Machine Learning The Breadth of ML Neural Networks & Deep Learning Marc Toussaint University of Stuttgart Duy Nguyen-Tuong Bosch Center for Artificial Intelligence Summer 2017 Neural Networks Consider
More informationThe Mathematics Behind Neural Networks
The Mathematics Behind Neural Networks Pattern Recognition and Machine Learning by Christopher M. Bishop Student: Shivam Agrawal Mentor: Nathaniel Monson Courtesy of xkcd.com The Black Box Training the
More informationNew methodology for calculating flight parameters with neural network EGD method
New methodology for calculating flight parameters with neural network EGD method Abdallah BEN MOSBAH, Ruxandra BOTEZ, Thien My DAO École de technologie supérieure (ÉTS), LARCASE, www.larcase.etsmtl.ca,
More informationDeep Learning with Tensorflow AlexNet
Machine Learning and Computer Vision Group Deep Learning with Tensorflow http://cvml.ist.ac.at/courses/dlwt_w17/ AlexNet Krizhevsky, Alex, Ilya Sutskever, and Geoffrey E. Hinton, "Imagenet classification
More informationNotes on Multilayer, Feedforward Neural Networks
Notes on Multilayer, Feedforward Neural Networks CS425/528: Machine Learning Fall 2012 Prepared by: Lynne E. Parker [Material in these notes was gleaned from various sources, including E. Alpaydin s book
More informationBatch Intrinsic Plasticity for Extreme Learning Machines
Batch Intrinsic Plasticity for Extreme Learning Machines Klaus Neumann and Jochen J. Steil Research Institute for Cognition and Robotics (CoR-Lab) Faculty of Technology, Bielefeld University Universitätsstr.
More informationDESIGN AND MODELLING OF A 4DOF PAINTING ROBOT
DESIGN AND MODELLING OF A 4DOF PAINTING ROBOT MSc. Nilton Anchaygua A. Victor David Lavy B. Jose Luis Jara M. Abstract The following project has as goal the study of the kinematics, dynamics and control
More informationImage Compression: An Artificial Neural Network Approach
Image Compression: An Artificial Neural Network Approach Anjana B 1, Mrs Shreeja R 2 1 Department of Computer Science and Engineering, Calicut University, Kuttippuram 2 Department of Computer Science and
More informationExperimental Data and Training
Modeling and Control of Dynamic Systems Experimental Data and Training Mihkel Pajusalu Alo Peets Tartu, 2008 1 Overview Experimental data Designing input signal Preparing data for modeling Training Criterion
More informationFAST NEURAL NETWORK ALGORITHM FOR SOLVING CLASSIFICATION TASKS
Virginia Commonwealth University VCU Scholars Compass Theses and Dissertations Graduate School 2012 FAST NEURAL NETWORK ALGORITHM FOR SOLVING CLASSIFICATION TASKS Noor Albarakati Virginia Commonwealth
More informationParticle Swarm Optimization
Particle Swarm Optimization Gonçalo Pereira INESC-ID and Instituto Superior Técnico Porto Salvo, Portugal gpereira@gaips.inesc-id.pt April 15, 2011 1 What is it? Particle Swarm Optimization is an algorithm
More informationTutorial 5. PDS Lab Section 16 Autumn Functions The C language is termed as function-oriented programming
PDS Lab Section 16 Autumn-2018 Tutorial 5 Functions The C language is termed as function-oriented programming Every C program consists of one or more functions. The concept is based on the divide-and-conquer
More informationNeural Networks. Single-layer neural network. CSE 446: Machine Learning Emily Fox University of Washington March 10, 2017
3/10/2017 Neural Networks Emily Fox University of Washington March 10, 2017 Slides adapted from Ali Farhadi (via Carlos Guestrin and Luke Zettlemoyer) Single-layer neural network 3/10/2017 Perceptron as a neural
More informationFast Training of Multilayer Perceptrons
Fast Training of Multilayer Perceptrons Brijesh Verma, Member of IEEE & IASTED School of Information Technology Faculty of Engineering and Applied Science Griffith University, Gold Coast Campus Gold Coast,
More information6. NEURAL NETWORK BASED PATH PLANNING ALGORITHM 6.1 INTRODUCTION
6. NEURAL NETWORK BASED PATH PLANNING ALGORITHM 6.1 INTRODUCTION In previous chapters path planning algorithms such as trigonometry based path planning algorithm and direction based path planning algorithm
More informationARTIFICIAL NEURAL NETWORK CIRCUIT FOR SPECTRAL PATTERN RECOGNITION
ARTIFICIAL NEURAL NETWORK CIRCUIT FOR SPECTRAL PATTERN RECOGNITION A Thesis by FARAH RASHEED Submitted to the Office of Graduate and Professional Studies of Texas A&M University in partial fulfillment
More informationNeural Network Weight Selection Using Genetic Algorithms
Neural Network Weight Selection Using Genetic Algorithms David Montana presented by: Carl Fink, Hongyi Chen, Jack Cheng, Xinglong Li, Bruce Lin, Chongjie Zhang April 12, 2005 1 Neural Networks Neural networks
More informationCMU Lecture 18: Deep learning and Vision: Convolutional neural networks. Teacher: Gianni A. Di Caro
CMU 15-781 Lecture 18: Deep learning and Vision: Convolutional neural networks Teacher: Gianni A. Di Caro DEEP, SHALLOW, CONNECTED, SPARSE? Fully connected multi-layer feed-forward perceptrons: More powerful
More informationLecture : Neural net: initialization, activations, normalizations and other practical details Anne Solberg March 10, 2017
INF 5860 Machine learning for image classification Lecture: Neural net: initialization, activations, normalizations and other practical details Anne Solberg March 10, 2017 Mandatory exercise Available tonight,
More informationLecture 13. Deep Belief Networks. Michael Picheny, Bhuvana Ramabhadran, Stanley F. Chen
Lecture 13 Deep Belief Networks Michael Picheny, Bhuvana Ramabhadran, Stanley F. Chen IBM T.J. Watson Research Center Yorktown Heights, New York, USA {picheny,bhuvana,stanchen}@us.ibm.com 12 December 2012
More informationBackpropagation + Deep Learning
10-601 Introduction to Machine Learning Machine Learning Department School of Computer Science Carnegie Mellon University Backpropagation + Deep Learning Matt Gormley Lecture 13 Mar 1, 2018 1 Reminders
More informationOpening the Black Box Data Driven Visualizaion of Neural N
Opening the Black Box Data Driven Visualization of Neural Networks September 20, 2006 Artificial Neural Networks Limitations of ANNs Use of Visualization (ANNs) mimic the processes found in biological
More informationProgramming Exercise 4: Neural Networks Learning
Programming Exercise 4: Neural Networks Learning Machine Learning Introduction In this exercise, you will implement the backpropagation algorithm for neural networks and apply it to the task of hand-written
More informationDeep Learning. Volker Tresp Summer 2014
Deep Learning Volker Tresp Summer 2014 1 Neural Network Winter and Revival While Machine Learning was flourishing, there was a Neural Network winter (late 1990 s until late 2000 s) Around 2010 there
More information