I've been struggling with a problem and just can't find a solution.
My problem is that I have an application (MetaTrader 4) that calls a DLL. The DLL receives arrays and integers from MetaTrader, does a lot of calculations, and returns a double value to MetaTrader.
The C# DLL looks like this:
using System;
using System.Collections.Generic;
using System.Text;
using RGiesecke.DllExport;
using System.Runtime.InteropServices;
using System.Windows.Forms;
using System.Linq;
using System.Threading;
using Encog;
using Encog.Neural.Networks;
using Encog.Neural.Networks.Layers;
using Encog.Neural.Networks.Training;
using Encog.Neural.Networks.Training.Propagation.Resilient;
using Encog.Engine.Network.Activation;
using Encog.ML.Data;
using Encog.ML.Data.Basic;
using Encog.App.Quant.Indicators;
using Encog.App.Quant;
using Encog.Neural.NeuralData;
using System.Data;
using System.ComponentModel;
namespace NN_1_DLL
{
public static class UnmanagedExports
{
static DataTable dt;
static DataGridView gridview;
static Form frm;
static double NeuralOutput = 0;
static double LowNormalize;
static double HighNormalize;
static double dataHigh;
static double dataLow;
static int NNInputs;
static int NNOutputs;
static double[] trainingData;
static double[] neuralInput;
static int trainingDataSize;
static int trainingSets;
static int epochMax;
static int hiddenLayerNeurons;
static double[][] TRAINING_INPUT;
static double[][] TRAINING_OUTPUT;
static double[] NEURAL_INPUT;
static bool isShowGUI;
static bool FormInitiated = false;
public static void GUI()
{
int i, j;
dt = new DataTable("Table");
for (i = 0; i < NNInputs; i++)
{
dt.Columns.Add("Input " + i, typeof(double));
}
dt.Columns.Add("Output", typeof(double));
for (i = 0; i < TRAINING_INPUT.GetLength(0); i++)
{
DataRow dataRow = dt.NewRow();
for (j = 0; j < TRAINING_INPUT[i].GetLength(0); j++)
{
//dataRow["Input " + j] = TRAINING_INPUT[i][j];
dataRow["Input " + j] = ((((dataLow - dataHigh) * TRAINING_INPUT[i][j] - HighNormalize * dataLow + dataHigh * LowNormalize) / (LowNormalize - HighNormalize)));
}
//dataRow["Output"] = TRAINING_OUTPUT[i][0];
dataRow["Output"] = ((((dataLow - dataHigh) * TRAINING_OUTPUT[i][0] - HighNormalize * dataLow + dataHigh * LowNormalize) / (LowNormalize - HighNormalize)));
dt.Rows.Add(dataRow);
}
if (FormInitiated == false)
{
frm = new Form();
frm.Name = "frm";
frm.ControlBox = false;
frm.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
frm.BackColor = System.Drawing.Color.LightGray;
frm.Size = new System.Drawing.Size(800, 600);
frm.Show();
gridview = new DataGridView();
gridview.Name = "gridview";
gridview.Height = 600;
gridview.Width = 800;
frm.Controls.Add(gridview);
FormInitiated = true;
}
gridview.DataSource = dt;
}
static void PrepareData()
{
double[] array_input = new double[NNInputs];
double[] array_output = new double[NNOutputs];
int i = 0;
int j = 0;
int refpoint = 0;
//PREPARE DATA FOR NEURAL NETWORK
dataHigh = Math.Max(trainingData.Max(), neuralInput.Max());
dataLow = Math.Min(trainingData.Min(), neuralInput.Min());
for (i = 0; i < trainingData.GetLength(0); i++)
{
trainingData[i] = (((trainingData[i] - dataLow) / (dataHigh - dataLow)) * (HighNormalize - LowNormalize) + LowNormalize);
}
NEURAL_INPUT = new double[neuralInput.GetLength(0)];
for (i = 0; i < neuralInput.GetLength(0); i++)
{
NEURAL_INPUT[i] = (((neuralInput[i] - dataLow) / (dataHigh - dataLow)) * (HighNormalize - LowNormalize) + LowNormalize);
}
TRAINING_INPUT = new double[trainingSets][];
TRAINING_OUTPUT = new double[trainingSets][];
for (i = 0; i < trainingSets; i++)
{
refpoint = (i * (NNInputs + NNOutputs));
for (j = refpoint; j < refpoint + NNInputs; j++)
{
array_input[j - refpoint] = trainingData[j];
}
refpoint = refpoint + NNInputs;
for (j = refpoint; j < refpoint + NNOutputs; j++)
{
array_output[j - refpoint] = trainingData[j];
}
TRAINING_INPUT[i] = array_input;
TRAINING_OUTPUT[i] = array_output;
array_input = new double[NNInputs];
array_output = new double[NNOutputs];
}
}
static void ComputeNN()
{
int i = 0;
BasicNetwork network = new BasicNetwork();
network.AddLayer(new BasicLayer(new ActivationTANH(), true, NNInputs));
network.AddLayer(new BasicLayer(new ActivationTANH(), true, hiddenLayerNeurons));
network.AddLayer(new BasicLayer(new ActivationTANH(), true, NNOutputs));
network.Structure.FinalizeStructure();
network.Reset();
IMLDataSet trainingSet = new BasicMLDataSet(TRAINING_INPUT, TRAINING_OUTPUT);
ITrain train = new ResilientPropagation(network, trainingSet);
int epoch = 1;
do
{
train.Iteration();
epoch++;
} while ((epoch < epochMax));
INeuralData input = new Encog.Neural.Data.Basic.BasicNeuralData(NNInputs);
for (i = 0; i < NNInputs; i++)
{
input[i] = NEURAL_INPUT[i];
}
IMLData output = network.Compute(input);
NeuralOutput = ((((dataLow - dataHigh) * output[0] - HighNormalize * dataLow + dataHigh * LowNormalize) / (LowNormalize - HighNormalize)));
}
[DllExport("NNExportDLL", CallingConvention = CallingConvention.StdCall)]
static double NNExportDLL([MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] double[] training_data, int training_data_size, int inputs, int outputs, int training_sets, [MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 2)] double[] neural_input, int epoch_Max, int hidden_Layer_Neurons, double Low_Normalize, double High_Normalize, int ShowGUI)
{
//Define some global variables
LowNormalize = new double();
HighNormalize = new double();
NNInputs = new int();
NNOutputs = new int();
trainingData = new double[training_data_size];
trainingDataSize = new int();
trainingSets = new int();
epochMax = new int();
hiddenLayerNeurons = new int();
neuralInput = new double[inputs];
LowNormalize = Low_Normalize;
HighNormalize = High_Normalize;
NNInputs = inputs;
NNOutputs = outputs;
trainingData = training_data;
trainingDataSize = training_data_size;
trainingSets = training_sets;
epochMax = epoch_Max;
hiddenLayerNeurons = hidden_Layer_Neurons;
neuralInput = neural_input;
if (ShowGUI == 1) { isShowGUI = true; } else { isShowGUI = false; }
if (isShowGUI == true)
{
PrepareData();
ComputeNN();
GUI();
}
if (isShowGUI == false)
{
PrepareData();
ComputeNN();
}
return (NeuralOutput);
}
}
}
I have to use threading, because the GUI freezes during the calculations and GUI() is completely unresponsive. I have already tried the BackgroundWorker class, but then the DLL returns its value before the work has finished, and the GUI still freezes.
GUI() is built from the data calculated by the PrepareData() and ComputeNN() functions, so those must run before GUI() is executed. PrepareData() and ComputeNN() must also be finished before the function returns the double value to MetaTrader.
I have no experience at all with threading and GUIs inside a DLL, so any advice is appreciated!
Maybe I'm trying to do something impossible or silly. The whole reason for the GUI is to let me see whether the data is being calculated as expected (it's more a way for me to debug the code). Later I will add charts and so on to visualize the training process/data. If there is a better way, please let me know!
Answer 0 (score: 2)
Try using the dispatcher of the GUI thread:
double d = 0d;
App.Current.Dispatcher.BeginInvoke((Action)(() =>
{
    d = callYourMethod();
}), System.Windows.Threading.DispatcherPriority.Normal);
You can check this link for a tutorial on the WPF dispatcher.
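Note that the question's code is Windows Forms (System.Windows.Forms), not WPF, so App.Current.Dispatcher is not available there. A rough WinForms equivalent, sketched here under the assumption that the form frm has already been created on a thread that is running a message loop, is to marshal the call through the control (callYourMethod() is the same placeholder as above):

// Minimal sketch, assuming frm lives on a running UI thread.
// Control.Invoke blocks until the UI thread has executed the delegate,
// so d holds the result afterwards.
double d = 0d;
frm.Invoke((Action)(() =>
{
    d = callYourMethod();
}));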
Give the BackgroundWorker another try. Declare these static fields:
private static BackgroundWorker prepareDataBW;
private static BackgroundWorker computeNnBW;
private static bool isFinishedPrepareData;
private static bool isFinishedComputeNN;
and these static methods (event handlers):
static void prepareDataBW_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    isFinishedPrepareData = true;
    BackgroundWorkerFinishedAction();
}

static void computeNnBW_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    isFinishedComputeNN = true;
    BackgroundWorkerFinishedAction();
}

static void BackgroundWorkerFinishedAction()
{
    if (isFinishedComputeNN && isFinishedPrepareData && isShowGUI)
    {
        UpdateUI();  // placeholder: refresh the grid with the newly computed data
    }
    else if (isFinishedComputeNN && isFinishedPrepareData)
    {
        ShowGUI();   // placeholder: e.g. the GUI() method from the question
    }
}
Then, instead of:
if (ShowGUI == 1) // show GUI
{
    PrepareData();
    ComputeNN();
    GUI();
}
else
{
    PrepareData();
    ComputeNN();
}
try something like this:
prepareDataBW = new BackgroundWorker();
prepareDataBW.DoWork += delegate
{
    PrepareData();
};
prepareDataBW.RunWorkerCompleted += new RunWorkerCompletedEventHandler(prepareDataBW_RunWorkerCompleted);

computeNnBW = new BackgroundWorker();
computeNnBW.DoWork += delegate
{
    ComputeNN();
};
computeNnBW.RunWorkerCompleted += new RunWorkerCompletedEventHandler(computeNnBW_RunWorkerCompleted);

// Note: ComputeNN() consumes the data produced by PrepareData(), so starting both
// workers at once is racy; starting computeNnBW from prepareDataBW_RunWorkerCompleted
// instead keeps the order guaranteed.
prepareDataBW.RunWorkerAsync();
computeNnBW.RunWorkerAsync();
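Also keep in mind the question's other requirement: NNExportDLL must not return until ComputeNN() has produced NeuralOutput, otherwise MetaTrader gets a stale value. The sketch below shows the general pattern under the assumption that it is acceptable to block the exported call while the work runs in the background; the class and member names are illustrative, not part of the question's code:

using System;
using System.ComponentModel;
using System.Threading;

static class BlockingExportSketch
{
    static readonly ManualResetEventSlim done = new ManualResetEventSlim(false);
    static double result;

    // Stand-in for NNExportDLL: starts the background work, then waits for the signal.
    public static double ComputeAndWait()
    {
        var worker = new BackgroundWorker();
        worker.DoWork += delegate
        {
            result = 42.0;   // stand-in for PrepareData() + ComputeNN()
            done.Set();      // signal as soon as the result exists
        };
        worker.RunWorkerAsync();

        done.Wait();         // block the caller until the worker has finished
        return result;       // the equivalent of returning NeuralOutput
    }

    static void Main()
    {
        Console.WriteLine(ComputeAndWait());
    }
}

Blocking like this keeps the exported signature unchanged, but it also means the calling thread cannot pump a message loop while it waits, which is exactly why the form needs its own UI thread (see the note about Application.Run further down).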
Based on all of your code, to test your app you should try something like this:
using System;
using System.Collections.Generic;
using System.Text;
using RGiesecke.DllExport;
using System.Runtime.InteropServices;
using System.Windows.Forms;
using System.Linq;
using System.Threading;
using Encog;
using Encog.Neural.Networks;
using Encog.Neural.Networks.Layers;
using Encog.Neural.Networks.Training;
using Encog.Neural.Networks.Training.Propagation.Resilient;
using Encog.Engine.Network.Activation;
using Encog.ML.Data;
using Encog.ML.Data.Basic;
using Encog.App.Quant.Indicators;
using Encog.App.Quant;
using Encog.Neural.NeuralData;
using System.Data;
using System.ComponentModel;
namespace NN_1_DLL
{
public static class UnmanagedExports
{
static DataTable dt;
static DataGridView gridview;
static Form frm;
static double NeuralOutput = 0;
static double LowNormalize;
static double HighNormalize;
static double dataHigh;
static double dataLow;
static int NNInputs;
static int NNOutputs;
static double[] trainingData;
static double[] neuralInput;
static int trainingDataSize;
static int trainingSets;
static int epochMax;
static int hiddenLayerNeurons;
static double[][] TRAINING_INPUT;
static double[][] TRAINING_OUTPUT;
static double[] NEURAL_INPUT;
static bool isShowGUI;
static bool FormInitiated = false;
public static BackgroundWorker bw = new BackgroundWorker();
public static void Main()
{
initForm();
bw.DoWork += delegate
{
double d = NNExportDLL(
new double[] { 2d, 3.4, 5d }, 3, 2, 2, 1, new double[] { 2d, 3d, 5d, 6d }, 3, 100, 4d, 8d, 1);
Console.Write(d);
};
Application.Run(frm);
}
public static void initForm()
{
if (FormInitiated == false)
{
frm = new Form();
frm.Name = "frm";
frm.ControlBox = false;
frm.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
frm.BackColor = System.Drawing.Color.LightGray;
frm.Size = new System.Drawing.Size(800, 700);
//frm.ShowDialog();
FlowLayoutPanel flp = new FlowLayoutPanel();
flp.Dock = DockStyle.Fill;
frm.Controls.Add(flp);
gridview = new DataGridView();
gridview.Name = "gridview";
gridview.Height = 600;
gridview.Width = 800;
flp.Controls.Add(gridview);
Button b = new Button();
b.Text = "Refresh";
b.Click += new EventHandler(b_Click);
b.Dock = DockStyle.Bottom;
flp.Controls.Add(b);
FormInitiated = true;
}
}
static void b_Click(object sender, EventArgs e)
{
bw.RunWorkerCompleted += new RunWorkerCompletedEventHandler(bw_RunWorkerCompleted);
bw.RunWorkerAsync();
}
static void bw_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
bw.RunWorkerCompleted -= new RunWorkerCompletedEventHandler(bw_RunWorkerCompleted);
RefreshData();
}
static void RefreshData()
{
int i, j;
dt = new DataTable("Table");
for (i = 0; i < NNInputs; i++)
{
dt.Columns.Add("Input " + i, typeof(double));
}
dt.Columns.Add("Output", typeof(double));
for (i = 0; i < TRAINING_INPUT.GetLength(0); i++)
{
DataRow dataRow = dt.NewRow();
for (j = 0; j < TRAINING_INPUT[i].GetLength(0); j++)
{
//dataRow["Input " + j] = TRAINING_INPUT[i][j];
dataRow["Input " + j] = ((((dataLow - dataHigh) * TRAINING_INPUT[i][j] - HighNormalize * dataLow + dataHigh * LowNormalize) / (LowNormalize - HighNormalize)));
}
//dataRow["Output"] = TRAINING_OUTPUT[i][0];
dataRow["Output"] = ((((dataLow - dataHigh) * TRAINING_OUTPUT[i][0] - HighNormalize * dataLow + dataHigh * LowNormalize) / (LowNormalize - HighNormalize)));
dt.Rows.Add(dataRow);
}
gridview.DataSource = dt;
}
static void PrepareData()
{
double[] array_input = new double[NNInputs];
double[] array_output = new double[NNOutputs];
int i = 0;
int j = 0;
int refpoint = 0;
//PREPARE DATA FOR NEURAL NETWORK
dataHigh = Math.Max(trainingData.Max(), neuralInput.Max());
dataLow = Math.Min(trainingData.Min(), neuralInput.Min());
for (i = 0; i < trainingData.GetLength(0); i++)
{
trainingData[i] = (((trainingData[i] - dataLow) / (dataHigh - dataLow)) * (HighNormalize - LowNormalize) + LowNormalize);
}
NEURAL_INPUT = new double[neuralInput.GetLength(0)];
for (i = 0; i < neuralInput.GetLength(0); i++)
{
NEURAL_INPUT[i] = (((neuralInput[i] - dataLow) / (dataHigh - dataLow)) * (HighNormalize - LowNormalize) + LowNormalize);
}
TRAINING_INPUT = new double[trainingSets][];
TRAINING_OUTPUT = new double[trainingSets][];
for (i = 0; i < trainingSets; i++)
{
refpoint = (i * (NNInputs + NNOutputs));
for (j = refpoint; j < refpoint + NNInputs; j++)
{
array_input[j - refpoint] = trainingData[j];
}
refpoint = refpoint + NNInputs;
for (j = refpoint; j < refpoint + NNOutputs; j++)
{
if (trainingData.Length - 1 >= j)
{
array_output[j - refpoint] = trainingData[j];
}
}
TRAINING_INPUT[i] = array_input;
TRAINING_OUTPUT[i] = array_output;
array_input = new double[NNInputs];
array_output = new double[NNOutputs];
}
}
static void ComputeNN()
{
int i = 0;
BasicNetwork network = new BasicNetwork();
network.AddLayer(new BasicLayer(new ActivationTANH(), true, NNInputs));
network.AddLayer(new BasicLayer(new ActivationTANH(), true, hiddenLayerNeurons));
network.AddLayer(new BasicLayer(new ActivationTANH(), true, NNOutputs));
network.Structure.FinalizeStructure();
network.Reset();
IMLDataSet trainingSet = new BasicMLDataSet(TRAINING_INPUT, TRAINING_OUTPUT);
ITrain train = new ResilientPropagation(network, trainingSet);
int epoch = 1;
do
{
train.Iteration();
epoch++;
} while ((epoch < epochMax));
INeuralData input = new Encog.Neural.Data.Basic.BasicNeuralData(NNInputs);
for (i = 0; i < NNInputs; i++)
{
input[i] = NEURAL_INPUT[i];
}
IMLData output = network.Compute(input);
NeuralOutput = ((((dataLow - dataHigh) * output[0] - HighNormalize * dataLow + dataHigh * LowNormalize) / (LowNormalize - HighNormalize)));
}
[DllExport("NNExportDLL", CallingConvention = CallingConvention.StdCall)]
static double NNExportDLL(
[MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] double[] training_data,
int training_data_size, int inputs, int outputs, int training_sets,
[MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 2)] double[] neural_input,
int epoch_Max,
int hidden_Layer_Neurons,
double Low_Normalize,
double High_Normalize,
int ShowGUI)
{
//Define some global variables
LowNormalize = new double();
HighNormalize = new double();
NNInputs = new int();
NNOutputs = new int();
trainingData = new double[training_data_size];
trainingDataSize = new int();
trainingSets = new int();
epochMax = new int();
hiddenLayerNeurons = new int();
neuralInput = new double[inputs];
LowNormalize = Low_Normalize;
HighNormalize = High_Normalize;
NNInputs = inputs;
NNOutputs = outputs;
trainingData = training_data;
trainingDataSize = training_data_size;
trainingSets = training_sets;
epochMax = epoch_Max;
hiddenLayerNeurons = hidden_Layer_Neurons;
neuralInput = neural_input;
if (ShowGUI == 1) { isShowGUI = true; } else { isShowGUI = false; }
PrepareData();
ComputeNN();
return (NeuralOutput);
}
}
}
The main problem is really that you tried to run frm.Show() without using Application.Run.
One more edit: don't forget to replace the contents of the bw.DoWork delegate with your own logic.
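If the exported function cannot sit inside Application.Run itself (MetaTrader needs NNExportDLL to return a value), one option is to give the form its own dedicated STA thread that runs the message loop, so the grid stays responsive while the calling thread computes and returns. A minimal sketch of that idea, with illustrative names that are not part of the code above:

using System;
using System.Threading;
using System.Windows.Forms;

static class UiThreadSketch
{
    static Form frm;   // the debug form, as in the question
    static readonly ManualResetEventSlim formReady = new ManualResetEventSlim(false);

    // Starts a dedicated STA thread that owns the form and its message loop,
    // so the form keeps pumping messages while the DLL does its work and returns.
    public static void StartUiThread()
    {
        var uiThread = new Thread(() =>
        {
            frm = new Form { Text = "NN debug", Width = 800, Height = 600 };
            frm.Shown += delegate { formReady.Set(); };
            Application.Run(frm);   // the message loop lives on this thread
        });
        uiThread.SetApartmentState(ApartmentState.STA);
        uiThread.IsBackground = true;   // do not keep the host process alive
        uiThread.Start();
        formReady.Wait();               // wait until the form actually exists
    }

    // Any later UI update must be marshalled onto that thread:
    public static void UpdateTitle(string text)
    {
        frm.BeginInvoke((Action)(() => frm.Text = text));
    }
}

With something along these lines, NNExportDLL could call StartUiThread() once, run PrepareData() and ComputeNN() on the calling thread (or a worker), push the results into the grid via BeginInvoke, and still return NeuralOutput only when the computation has finished.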