
The source image is divided into square blocks of 10x10 pixels, so each block contains 100 pixels; these 100 values form the input vector of the network.
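The helpers GetBlockRect and CreateInput that appear in the button handler at the end of this document are not reproduced there, so the following is only a minimal sketch of what they might look like, assuming blocks are numbered left to right, top to bottom, and that the brightness of each pixel (scaled to [0, 1]) is used as the input value:

using System.Drawing;

static class BlockHelpersSketch
{
    // Rectangle occupied by the i-th 10x10 block of the image.
    public static Rectangle GetBlockRect(Bitmap image, int blockIndex, int blockSide = 10)
    {
        int blocksPerLine = image.Width / blockSide;
        int x = (blockIndex % blocksPerLine) * blockSide;
        int y = (blockIndex / blocksPerLine) * blockSide;
        return new Rectangle(x, y, blockSide, blockSide);
    }

    // Flatten a 10x10 block into the 100-element input vector, one value in [0, 1] per pixel.
    public static double[] CreateInput(Bitmap block)
    {
        double[] input = new double[block.Width * block.Height];
        for (int y = 0; y < block.Height; y++)
            for (int x = 0; x < block.Width; x++)
                input[y * block.Width + x] = block.GetPixel(x, y).GetBrightness();
        return input;
    }
}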

Each block is also assigned a single colour, chosen as the mode of the colours of its pixels. (The mode is the most frequently occurring value; for example, in the sequence 6, 2, 6, 6, 8, 9, 9, 9, 10 the modes are 6 and 9.) A 24-bit RGB pixel can take 16777216 different values, while the Windows console can display only 16 colours, so when console output is required the chosen colour additionally has to be reduced to one of these 16.
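The mode-based choice of a block's colour can be sketched as follows (an illustration of the idea only; the application's actual BitmapWorker code is not listed in this document):

using System.Collections.Generic;
using System.Drawing;
using System.Linq;

static class BlockColorSketch
{
    // Returns the most frequent (mode) colour among the pixels of the block.
    // On a tie, as in the "6 and 9" example above, the first maximal entry wins.
    public static Color DominantColor(Bitmap block)
    {
        var counts = new Dictionary<Color, int>();
        for (int y = 0; y < block.Height; y++)
            for (int x = 0; x < block.Width; x++)
            {
                Color c = block.GetPixel(x, y);
                counts[c] = counts.TryGetValue(c, out int n) ? n + 1 : 1;
            }
        return counts.OrderByDescending(p => p.Value).First().Key;
    }
}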


Each block also has to be mapped to an ASCII character. Even for a purely black-and-white 10x10 block there are 2^100 possible pixel combinations, so an explicit lookup table is out of the question; instead a neural network is trained to perform the mapping, as illustrated in Figure 2.6.

Figure 2.6.


The recognition network maps each block to one of ten symbols, among them the space and the '#' character; the full set is given in Table 1.

Table 1: Symbols produced by the recognition network

' ' (space)
'#'
'('
')'
'\'
'/'
'|'
'_'
'-'
'0'

The training set for this network contains 770 images (roughly 70 for each symbol).

 

 

The system is implemented in C# on the .NET platform.

Figure 3.1.

 


Figure 3.2.

 

INeuroObject: the base interface of the library's object model; like every .NET type, its implementations ultimately derive from System.Object.

INeuralNetwork: the interface of a neural network as a whole.

ILayer: the common interface of network layers.

ISingleLayer: a single layer of neurons.

IMultiLayer: a layer composed of several ISingleLayer instances, i.e. a multilayer topology.

 

Figure 3.3.

ILearn: the interface of a learning algorithm; it receives an INeuralNetwork together with a training set and adjusts the network parameters.

ILearningConfig: the set of parameters that control training, such as the learning-rate step, the error thresholds and the maximum number of epochs.

INeuroRandom: a source of random numbers used to initialise the network weights, for example uniformly in the range [-5, 5].
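A minimal sketch of such an initializer, assuming a uniform distribution over [-5, 5] (the library's actual INeuroRandom implementation is not shown in this document):

using System;

public sealed class UniformNeuroRandomSketch
{
    private readonly Random _random = new Random();

    // Returns a weight value uniformly distributed in [-5, 5].
    public double NextWeight()
    {
        return -5.0 + _random.NextDouble() * 10.0;
    }
}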

 

 

Figure 3.4.

IFunction: the common ancestor of all functions used by the network; IErrorFunction and IActivation derive from it.

IDifferentiableActivation: an activation function that can also return its derivative, which is required by back-propagation.

IPartialErrorFunction: an error function that can return the partial derivative of the error with respect to a single network output.

IFullErrorFunction: an error function computed over the whole output vector, such as the half-Euclidean error used below.

All of these interfaces are generic in the numeric type of the signals and weights; in this work they are instantiated with System.Double.
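The configurations below use Neuro.MLP.ErrorFunction.HalfEuclid as the error function. A sketch of what such a function computes, with signatures assumed to match the Compute and Derivative calls made by the training code at the end of the document:

public static class HalfEuclidSketch
{
    // E = 1/2 * sum_i (expected_i - actual_i)^2
    public static double Compute(double[] expected, double[] actual)
    {
        double sum = 0;
        for (int i = 0; i < expected.Length; i++)
        {
            double d = expected[i] - actual[i];
            sum += d * d;
        }
        return sum / 2;
    }

    // Partial derivative of E with respect to a single output: dE/d(actual) = actual - expected.
    public static double Derivative(double actual, double expected)
    {
        return actual - expected;
    }
}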

 

Two networks are trained: one recognises the symbol for a block and the other recognises its colour.

The symbol network has 100 inputs (the pixels of a 10x10 block, scaled to [0, 1]) and 10 outputs, one per symbol. Its single hidden layer contains 7 neurons with a sigmoid activation; the output layer uses ReLU. Training stopped after 317 epochs with a total error of 0.3265; on a test set of 15% of the images the recognition rate was 93.5%. The training set contains 770 images.

The network is configured as follows:

ISingleLayer<double>[] layers = new ISingleLayer<double>[2];

layers[0] = new SingleLayer(100, 7, new Neuro.MLP.ActivateFunction.Sigmoid(), new Random());

layers[1] = new SingleLayer(7, 10, new Neuro.MLP.ActivateFunction.Relu(), new Random());

MultiLayer mLayer = new MultiLayer(layers);

DifferintiableLearningConfig config = new DifferintiableLearningConfig(new Neuro.MLP.ErrorFunction.HalfEuclid());

config.Step = 0.1;

config.OneImageMinError = 0.01;

config.MinError = 0.4;

config.MinChangeError = 0.0000001;

config.UseRandomShuffle = true;

config.MaxEpoch = 10000;

SimpleBackPropogation learn = new SimpleBackPropogation(config);

MultiLayerNeuralNetwork network = new MultiLayerNeuralNetwork(mLayer, learn);

network.DefaultInitialise();

network.Train(learningSet, testingSet);

network.Save("fsdf" +".json");

The colour network distinguishes the 16 console colours, so it has 16 outputs. It has 24 inputs, since an RGB colour occupies 24 bits, and a hidden layer of 6 neurons. Training stopped after 561 epochs with an error of 0.4543; on a test set of 15% of the images the recognition rate was 97%. The training set contains 600 images.

Its configuration:

ISingleLayer<double>[] layers = new ISingleLayer<double>[2];

layers[0] = new SingleLayer(24, 6, new Neuro.MLP.ActivateFunction.Sigmoid(), new Random());

layers[1] = new SingleLayer(6, 16, new Neuro.MLP.ActivateFunction.Relu(), new Random());

MultiLayer mLayer = new MultiLayer(layers);

DifferintiableLearningConfig config = new DifferintiableLearningConfig(new Neuro.MLP.ErrorFunction.HalfEuclid());

config.Step = 0.1;

config.OneImageMinError = 0.01;

config.MinError = 0.5;

config.MinChangeError = 0.0000001;

config.UseRandomShuffle = true;

config.MaxEpoch = 10000;

SimpleBackPropogation learn = new SimpleBackPropogation(config);

MultiLayerNeuralNetwork network = new MultiLayerNeuralNetwork(mLayer, learn);

network.DefaultInitialise();

network.Train(learningSet, testingSet);

network.Save("fsdf" +".json");


 

Figure 3.5.

The conversion of an image into coloured text consists of the following steps:

1) The source image is loaded into a System.Drawing.Bitmap.

2) . .

3) . . . . .

4) , , . , .

5) ,

6) , , .

7)

8) The image is split into an n x m grid of blocks of 10 x 10 pixels each.

9) .

10)

11) .

12) , .

13) The output of the symbol network for each block is mapped to a character from the set {' ','#','(',')','\','/','|','_','-','0','\n'}; the '#' character, for example, marks filled areas.

14) The colour of each block is mapped to one of the 16 standard console colours.

 

 

The operation of the program is divided into seven stages. Stage 2 contains the sub-steps 2.1 and 2.2, and stage 5 corresponds to pressing the GO button.


 

Stage 1 is illustrated in Figure 3.6 and stage 5 in Figure 3.7.

Figure 3.6.

 

Figure 3.7.

The blocks of the diagram correspond to the stages as follows:

1 - stage 1
2 - stage 4
3 - stage 2
4 - stage 7
5 - stage 5
6 - step 2.2 of stage 2
7 - stages 3 and 6

 

The implementation is organised as follows.

All work with images is encapsulated in the BitmapWorker class, which operates on System.Drawing.Bitmap objects.

The loaded image is shown to the user through the Image property of a System.Windows.Forms.PictureBox control.

Recognition itself is performed by the Compute method of MultiLayerNeuralNetwork.

In 16-colour mode each block receives one of the 16 console colours; otherwise it keeps an ordinary RGB colour. The recognised symbols are stored as char values.

After every row of blocks a '\n' character is appended to the generated text. The accumulated text and colours are kept in the Model class.

The main methods of BitmapWorker are the following:

ColorTo: reduces the colour depth of the image from 24 to 8 bits per pixel.

ColorToGray: converts the image to shades of grey; it relies on ColorTo.

Cut: cuts the specified rectangular fragment out of the image.

RecalculateSize: recalculates the image dimensions for a given target width.

Resize: resizes the image to the specified dimensions.

Round: rounds the image dimensions to a multiple of the block size.

 

Zoom: scales the image to the calculated size.

The Model class exposes the following properties:

Colors: the list of colours assigned to the blocks.

DefaultZoom: determines whether the image is automatically scaled to the selected width before conversion; this matters for large images (1600 pixels wide and more).

Doublicate: if set, every recognised symbol and its colour are emitted twice in a row.

Figure 3.7.

ImagePath: the path to the source image file.

Images: the collection that maps the index of a network output to the corresponding character; see the sketch below.
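A sketch of how the Images collection could be filled and used, assuming it simply lists the symbols of step 13 in the order of the network outputs (the actual initialisation code is not shown):

using System.Collections.Generic;
using System.Linq;

static class SymbolLookupSketch
{
    // Assumed contents: the i-th output neuron corresponds to the i-th symbol of the set.
    static readonly List<char> Images =
        new List<char> { ' ', '#', '(', ')', '\\', '/', '|', '_', '-', '0' };

    // Picks the symbol whose output neuron has the highest activation.
    public static char SymbolFor(double[] networkOutput)
    {
        int index = networkOutput.ToList().IndexOf(networkOutput.Max());
        return Images[index];
    }
}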

IsChanged: indicates whether the loaded image has been modified.

SixteenColorMode: if enabled, the colour of every block is reduced to one of the 16 colours supported by the Windows console; otherwise the original RGB colour of the block is kept.
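For illustration, the reduction to a 16-colour palette can also be done without a network by taking the nearest palette entry in RGB space; the application itself uses the trained colour network when this mode is enabled, so the following is only a sketch of a simple distance-based alternative:

using System.Collections.Generic;
using System.Drawing;

static class PaletteSketch
{
    // Returns the palette colour closest to c by squared Euclidean distance in RGB space.
    public static Color Nearest(Color c, IReadOnlyList<Color> palette)
    {
        Color best = palette[0];
        int bestDistance = int.MaxValue;
        foreach (Color p in palette)
        {
            int dr = c.R - p.R, dg = c.G - p.G, db = c.B - p.B;
            int distance = dr * dr + dg * dg + db * db;
            if (distance < bestDistance)
            {
                bestDistance = distance;
                best = p;
            }
        }
        return best;
    }
}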

Figure 3.8.

 

Text: the generated text representation of the image.

UsePreworking: if enabled, the image is converted to shades of grey before its blocks are fed to the symbol network.
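A sketch of the kind of grayscale conversion ColorToGray could perform; the exact formula used by BitmapWorker is not shown in this document, so standard ITU-R BT.601 luminance weights are assumed:

using System.Drawing;

static class GrayscaleSketch
{
    public static Bitmap ToGrayscale(Bitmap source)
    {
        var result = new Bitmap(source.Width, source.Height);
        for (int y = 0; y < source.Height; y++)
            for (int x = 0; x < source.Width; x++)
            {
                Color c = source.GetPixel(x, y);
                int g = (int)(0.299 * c.R + 0.587 * c.G + 0.114 * c.B); // luminance
                result.SetPixel(x, y, Color.FromArgb(g, g, g));
            }
        return result;
    }
}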

Figure 3.9.

WidthValue: the target width used when the image is scaled.

The window that displays the result provides the following members:

Content: the control that holds the generated text.

ChangeImage: replaces the displayed image.

ClearContent: clears the displayed content.

PutImage: outputs the generated text, applying the stored colour of each symbol.

AssignEmptySimbol and AssignFillSymbol: set the characters used for empty and for filled blocks.

FromColor and ToColor: convert between RGB values and the colour representation used for output.

 

Figure 3.10: the result rendered with the '#' and '█' symbols.

 

 


Figure 4.1.

Figure 4.2.

Figure 4.3.

Figure 4.4.

Figure 4.5.

 

 

In conclusion, the developed application converts an arbitrary image into coloured ASCII art: the image is split into blocks, a multilayer perceptron chooses a symbol for every block, and a second network chooses its colour from the console palette.

 


 

[DataContract]

public class SimpleBackPropogation: DifferintiableLearningConfig, ILearn<double>

{

public SimpleBackPropogation(DifferintiableLearningConfig config)

: base(config)

{

 

}

 

/// <summary>
/// Trains the network using simple on-line back-propagation.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="data">The training set.</param>
/// <param name="check">The test set used to estimate the recognition rate.</param>

public void Train(INeuralNetwork<double> network, IList<NeuroImage<double>> data, IList<NeuroImage<double>> check)

{

// The algorithm works with the concrete MultiLayer implementation.

MultiLayer net = network.Layer as MultiLayer;

this.CurrentEpoch = 0;

this.Succes = false;

double lastError = 0;

double[][] y = new double[data.Count][];

 

do

{

lastError = CurrentError;

// Optionally shuffle the training set at the start of each epoch.

if (UseRandomShuffle == true)

{

RandomShuffle(ref data);

}

for (int curr = 0; curr < data.Count; curr++)

{

 

// Forward pass: compute the network output for the current image.

y[curr] = network.Compute(data[curr].Input);

 

// If the error for this image is already below the per-image threshold
// (OneImageMinError), skip the weight update for it.

if (Compare(data[curr].Output, network.Layer[net.Layer.Length - 1].LastPulse) == true)

{

continue;

}

 

// Error (delta) of the output layer.

OutputLayerError(data, net, curr);

 

// Propagate the error back through the hidden layers.

HiddenlayerError(net);

 

// Update the weights and offsets.

ModifyParametrs(data, net, curr);

 

}

 

// Total error over the epoch.

ComputeError(data, network,y);

 

// Add the regularization term, if enabled.

ComputeRegularizationError(data, net);

 

 

System.Console.WriteLine("Eposh #" + CurrentEpoch.ToString() +

" finished; current error is " + CurrentError.ToString()

);

CurrentEpoch++;

}

while (CurrentEpoch < MaxEpoch && CurrentError > MinError &&

Math.Abs(CurrentError - lastError) > MinChangeError);

 

ComputeRecognizeError(check, network);

 

 

}

 

private void ComputeRecognizeError(IList<NeuroImage<double>> check, INeuralNetwork<double> network)

{

int accept = 0;

double LastError = 0;

RecogniseError = 0;

for (int i = 0; i < check.Count; i++)

{

double[] realOutput = network.Compute(check[i].Input);

double[] result = new double[realOutput.Length];

LastError = ErrorFunction.Compute(check[i].Output, realOutput);

RecogniseError += LastError;

double max = realOutput.Max();

int index = realOutput.ToList().IndexOf(max);

result[index] = 1;

if (ArrayCompare(result, check[i].Output) == true)

{

accept++;

}

}

RecognisePercent = (double)accept / (double)check.Count;

RecogniseError /= 2;

}

 

public static bool ArrayCompare(double[] a, double[] b)

{

if (a.Length == b.Length)

{

for (int i = 0; i < a.Length; i++)

{

if (a[i] != b[i]) { return false; }

}

return true;

}

return false;

}

 

/// <summary>
/// Adds the L2 regularization term to the current error.
/// </summary>
/// <param name="data">The training set (its size is used for averaging).</param>
/// <param name="net">The network whose weights are penalized.</param>

private void ComputeRegularizationError(IList<NeuroImage<double>> data, MultiLayer net)

{

if (Math.Abs(Regularization - 0d) > Double.Epsilon)

{

double reg = 0;

for (int layerIndex = 0; layerIndex < net.Layer.Length; layerIndex++)

{

for (int neuronIndex = 0; neuronIndex < net.Layer[layerIndex].Neuron.Length; neuronIndex++)

{

for (int weightIndex = 0; weightIndex < net.Layer[layerIndex].Neuron[neuronIndex].Weights.Length; weightIndex++)

{

reg += net.Layer[layerIndex].Neuron[neuronIndex].Weights[weightIndex] *

net.Layer[layerIndex].Neuron[neuronIndex].Weights[weightIndex];

}

}

}

CurrentError += Regularization * reg / (2 * data.Count);

}

}

 

/// <summary>
/// Computes the total error over the training set for the finished epoch.
/// </summary>
/// <param name="data">The training set.</param>
/// <param name="network">The network being trained.</param>

private void ComputeError(IList<NeuroImage<double>> data, INeuralNetwork<double> network, double[][] realOutput)

{

int accept = 0;

double LastError = 0;

CurrentError = 0;

for (int i = 0; i < data.Count; i++)

{

LastError = ErrorFunction.Compute(data[i].Output, realOutput[i]);

CurrentError += LastError;

}

TeachPercent = (double)accept / (double)data.Count;

CurrentError /= 2;

}

 

 

/// <summary>
/// Updates the weights and offsets of every layer for the current image.
/// </summary>
/// <param name="data">The training set.</param>
/// <param name="net">The network being trained.</param>
/// <param name="curr">The index of the current image.</param>

private void ModifyParametrs(IList<NeuroImage<double>> data, MultiLayer net, int curr)

{

for (int i = net.Layer.Length - 1; i >= 0; i--)// from the output layer down to the input layer

{

for (int j = 0; j < net[i].Neuron.Length; j++)// for every neuron of the layer

{

double temp = Step * net[i].Neuron[j].CurrentError

* net[i].Neuron[j].ActivationFunction.Derivative(net[i].Neuron[j].LastPulse);

net[i].Neuron[j].Offset += temp;

for (int k = 0; k < net[i].Neuron[j].Weights.Length; k++)// for every weight of the neuron

{

 

if (i == 0)

{

net[i].Neuron[j].Weights[k] -= temp

* data[curr].Input[k] + Regularization * net[i].Neuron[j].Weights[k] / data.Count;

}

else

{

net[i].Neuron[j].Weights[k] -= temp

* net[i - 1].Neuron[k].LastState + Regularization * net[i].Neuron[j].Weights[k] / data.Count;

}

}

}

}

}

 

/// <summary>
/// Computes the error (delta) of the hidden layers.
/// </summary>
/// <param name="net">The network being trained.</param>

private static void HiddenlayerError(MultiLayer net)

{

for (int k = net.Length - 2; k >= 0; k--)

{

// For every neuron of the hidden layer...

for (int j = 0; j < net[k].Neuron.Length; j++)

{

net.Layer[k].Neuron[j].CurrentError = 0;

// ...accumulate the error coming from the neurons of the next layer.

for (int i = 0; i < net[k + 1].Neuron.Length; i++)

{

// error_j = sum_over_i( error_i * w_ij * F'(S_i) ), where i runs over the neurons of the next layer

net.Layer[k].Neuron[j].CurrentError += net.Layer[k + 1].Neuron[i].CurrentError

* net.Layer[k + 1].Neuron[i].Weights[j] *

net.Layer[k + 1].Neuron[i].ActivationFunction.Derivative(net.Layer[k + 1].Neuron[i].LastPulse);

}

 

}

}

}

 

/// <summary>
/// Computes the error (delta) of the output layer for the current image.
/// </summary>
/// <param name="data">The training set.</param>
/// <param name="net">The network being trained.</param>
/// <param name="curr">The index of the current image.</param>

private void OutputLayerError(IList<NeuroImage<double>> data, MultiLayer net, int curr)

{

int last = net.Layer.Length - 1;

// Delta of every output neuron is the derivative of the error function.

for (int j = 0; j < net.Layer[last].Neuron.Length; j++)

{

net.Layer[last].Neuron[j].CurrentError = 0;

net.Layer[last].Neuron[j].CurrentError = ErrorFunction.Derivative(net.Layer[last].Neuron[j].LastState, data[curr].Output[j]);

}

}

 

private void RandomShuffle(ref IList<NeuroImage<double>> data)

{

Random gen = new Random();

int ind1, ind2;

for (int i = 0; i < data.Count; i++)

{

ind1 = gen.Next(0, data.Count);

ind2 = gen.Next(0, data.Count);

Swap(data, ind1, ind2);

}

}

 

private void Swap(IList<NeuroImage<double>> data, int ind1, int ind2)

{

NeuroImage<double> temp = data[ind1];

data[ind1] = data[ind2];

data[ind2] = temp;

}

 

private bool Compare(double[] p1, double[] p2)

{

double error = 0.0;

for (int i = 0; i < p1.Length; i++)

{

error += (p1[i] - p2[i]) * (p1[i] - p2[i]);

}

error /= 2;

if (error < OneImageMinError)

return true;

else

return false;

}

 

 

}

 

 

The handler of the button that starts the conversion:

private void cloudButton1_Click(object sender, EventArgs e)

{

UpdateParametrizedSymbol();

toolStripProgressBar1.ProgressBar.Show();

Application.DoEvents();

source.Colors.Clear();

source.Text = null;

Task.Run(

() =>

{

Bitmap image = new Bitmap(pictureBox1.Image);

if (System.IO.File.Exists("SymbolRecognise.json") == false)

{

MessageBox.Show("! SymbolRecognise.json");

return;

}

if (System.IO.File.Exists("ColorRecognise.json") == false)

{

MessageBox.Show("! ColorRecognise.json");

return;

}

var SymbolNetwork = Neuro.MLP.MultiLayerNeuralNetwork.Load("SymbolRecognise.json");

var ColorNetwork = Neuro.MLP.MultiLayerNeuralNetwork.Load("ColorRecognise.json");

 

System.Drawing.Size blockSize = new Size(10, 10);

System.Drawing.Size metrics = Work.BitmapWorker.Round(image.Size, blockSize);

if (source.DefaultZoom == true)

{

 

metrics = Work.BitmapWorker.RecalculateSize(image.Size, source.WidthValue);

image = Work.BitmapWorker.Zoom(image, metrics);

}

if (image.Height % blockSize.Height != 0)

{

metrics = Work.BitmapWorker.Round(image.Size, blockSize);

image = Work.BitmapWorker.Resize(image, metrics);

}

 

int blockPerLine = Convert.ToInt32((float)image.Width / (float)blockSize.Width);

int BlockAmount = Convert.ToInt32((float)image.Width / (float)blockSize.Width * (float)image.Height / (float)blockSize.Height);

double[] input = new double[100];

Bitmap ImageToEncoding;

if (source.UsePreworking == true)

ImageToEncoding = Work.BitmapWorker.ColorToGray(image);

else

ImageToEncoding = image;

Bitmap temp, encodeTemp;

Rectangle rect;

for (int i = 0; i < BlockAmount; i++)

{

rect = GetBlockRect(image, i);

temp = Work.BitmapWorker.Cut(image, rect);

encodeTemp = Work.BitmapWorker.Cut(ImageToEncoding, rect);

 

if (source.SixteenColorMode == true)

{

 

source.Colors.Add(ColorNetwork.Compute(temp));

}

else

{

source.Colors.Add(temp.GetPixel(5, 5));

}

input = CreateInput(encodeTemp);

 

var res = SymbolNetwork.Compute(input);

double max = res.Max();

int index = res.ToList().IndexOf(max);

source.Text += source.Images[index];

 

if (source.Doublicate)

{

source.Text += source.Images[index];

source.Colors.Add(source.Colors.Last());

}

 

if (i % blockPerLine == blockPerLine - 1)

source.Text += '\n';

}

Form2 dlg = new Form2(source.Text, source.Colors);

 

 

toolStripProgressBar1.ProgressBar.InvokeIfRequired(() =>

{

// Do anything you want with the control here

toolStripProgressBar1.ProgressBar.Hide();

});

 

dlg.ShowDialog();

 

}

);

}




