-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathBaseFunctions.cs
More file actions
119 lines (92 loc) · 3.95 KB
/
BaseFunctions.cs
File metadata and controls
119 lines (92 loc) · 3.95 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Configs;
using NumSharp.Core;
using Proxem.BlasNet;
using System;
using System.Collections.Generic;
using System.Text;
namespace Proxem.NumNet.Benchmark
{
[CoreJob]
//[CsvExporter]
[GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory)]
[CategoriesColumn]
public class BaseFunctionsBenchmarkMkl
{
    /// <summary>
    /// Benchmarks basic array operations (dot product, elementwise math,
    /// arithmetic operators, argmax) of Proxem.NumNet backed by MKL against
    /// the equivalent NumSharp calls, grouped by category.
    /// </summary>

    // NumNet operands: two N x N matrices and two flat vectors of length N * N.
    private Array<float> numnet_1;
    private Array<float> numnet_2;
    private Array<float> numnet_flat_1;
    private Array<float> numnet_flat_2;

    //private NumPy np;

    // NumSharp operands mirroring the NumNet ones above.
    private NDArray numsharp_1;
    private NDArray numsharp_2;
    private NDArray numsharp_flat_1;
    private NDArray numsharp_flat_2;

    // Side length of the square matrices; the flat arrays hold N * N elements.
    [Params(100, 500)]
    public int N;

    /// <summary>
    /// Starts the MKL runtime provider for NumNet and fills every operand
    /// with standard-normal random data. Runs once per parameter set.
    /// </summary>
    [GlobalSetup]
    public void Setup()
    {
        // MKL runtime location: overridable through the MKL_PATH environment
        // variable so the benchmark is portable across machines; falls back
        // to the original hard-coded default for backward compatibility.
        var path = Environment.GetEnvironmentVariable("MKL_PATH") ?? "C:/data/dlls/mkl";
        StartProvider.LaunchMklRt(1, path);

        numnet_1 = NN.Random.Normal(0, 1, N, N);
        numnet_2 = NN.Random.Normal(0, 1, N, N);
        numnet_flat_1 = NN.Random.Normal(0, 1, N * N);
        numnet_flat_2 = NN.Random.Normal(0, 1, N * N);

        //np = new NumPy();
        // need reshaping cause there's a bug in 'np.random.normal'
        numsharp_1 = np.random.normal(0, 1, N, N).reshape(new Shape(N, N));
        numsharp_2 = np.random.normal(0, 1, N, N).reshape(new Shape(N, N));
        numsharp_flat_1 = np.random.normal(0, 1, N * N);
        numsharp_flat_2 = np.random.normal(0, 1, N * N);
    }

    // --- Matrix product (N x N) and inner product (flat, length N * N) ---

    [BenchmarkCategory("Dot"), Benchmark]
    public Array<float> NumNetDot() => NN.Dot(numnet_1, numnet_2);

    [BenchmarkCategory("Dot"), Benchmark]
    public NDArray NumSharpDot() => np.dot(numsharp_1, numsharp_2);

    [BenchmarkCategory("Dot"), Benchmark]
    public Array<float> NumNetDotFlat() => NN.Dot(numnet_flat_1, numnet_flat_2);

    [BenchmarkCategory("Dot"), Benchmark]
    public NDArray NumSharpDotFlat() => np.dot(numsharp_flat_1, numsharp_flat_2);

    // --- Elementwise math functions ---

    [BenchmarkCategory("Maths"), Benchmark]
    public Array<float> NumNetLog() => NN.Log(numnet_1);

    [BenchmarkCategory("Maths"), Benchmark]
    public NDArray NumSharpLog() => np.log(numsharp_1);

    // --- Elementwise arithmetic operators ---

    [BenchmarkCategory("Operations"), Benchmark]
    public Array<float> NumNetDiff() => numnet_1 - numnet_2;

    [BenchmarkCategory("Operations"), Benchmark]
    public NDArray NumSharpDiff() => numsharp_1 - numsharp_2;

    [BenchmarkCategory("Operations"), Benchmark]
    public Array<float> NumNetAdd() => numnet_1 + numnet_2;

    [BenchmarkCategory("Operations"), Benchmark]
    public NDArray NumSharpAdd() => numsharp_1 + numsharp_2;

    [BenchmarkCategory("Operations"), Benchmark]
    public Array<float> NumNetHadamard() => numnet_1 * numnet_2;

    [BenchmarkCategory("Operations"), Benchmark]
    public NDArray NumSharpHadamard() => numsharp_1 * numsharp_2;

    [BenchmarkCategory("Operations"), Benchmark]
    [Arguments(1.5f)]
    [Arguments(-2.8f)]
    public Array<float> NumNetScalarMul(float lambda) => numnet_1 * lambda;

    [BenchmarkCategory("Operations"), Benchmark]
    [Arguments(1.5f)]
    [Arguments(-2.8f)]
    public NDArray NumSharpScalarMul(float lambda) => numsharp_1 * lambda;

    // --- Reductions ---
    // NOTE(review): the NumSharp side calls np.amax (maximum VALUE) while the
    // NumNet side calls NN.Argmax (INDEX of the maximum), so these pairs do
    // not measure the same operation. Confirm whether np.argmax is available
    // in the referenced NumSharp version and switch to it if so.

    [BenchmarkCategory("Base"), Benchmark]
    public Array<float> NumNetArgmax() => NN.Argmax(numnet_1);

    [BenchmarkCategory("Base"), Benchmark]
    public NDArray NumSharpArgmax() => np.amax(numsharp_1);

    [BenchmarkCategory("Base"), Benchmark]
    public void NumNetArgmaxAxis()
    {
        var a = NN.Argmax(numnet_1, 0);
        var b = NN.Argmax(numnet_1, 1);
    }

    [BenchmarkCategory("Base"), Benchmark]
    public void NumSharpArgmaxAxis()
    {
        np.amax(numsharp_1, 0);
        np.amax(numsharp_1, 1);
    }
}
}