Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Latest commit

 

History

History
History
229 lines (216 loc) · 8.2 KB

File metadata and controls

229 lines (216 loc) · 8.2 KB
Copy raw file
Download raw file
Open symbols panel
Edit and raw actions
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
package decimator;
import java.util.Arrays;
import Acquisition.DaqSourceInfo;
import Filters.Filter;
import Filters.FilterBand;
import Filters.FilterMethod;
import Filters.FilterParams;
import Filters.FilterType;
import Filters.interpolate.Interpolator;
import Filters.interpolate.PolyInterpolator0;
import Filters.interpolate.PolyInterpolator1;
import Filters.interpolate.PolyInterpolator2;
import PamDetection.RawDataUnit;
import PamUtils.PamUtils;
/**
 * Job to run the actual decimation. Separated out from
 * DecimatorProcess so that it can be used elsewhere.
 * <br> Note that this can both decimate and upsample. If decimating, filtering
 * takes place before data are copied to the output array. If upsampling, filtering takes
 * place AFTER data are copied to the output array.
 * <br> NOTE(review): this class keeps per-channel carry-over state between calls to
 * {@link #process(RawDataUnit)}, so one instance must only be fed a single
 * contiguous stream of data per channel — presumably not thread-safe; confirm with callers.
 * @author dg50
 *
 */
public class DecimatorWorker {

	// One anti-alias / interpolation filter per channel, indexed by channel number.
	private Filter filters[];

	// Per-channel scratch buffer holding filtered input samples (decimating) or a
	// reference to the raw input (upsampling - filtered later).
	private double[][] filteredData;

	// Per-channel fractional read cursor into the current input block. Carries the
	// fractional remainder across blocks so resampling phase is continuous.
	private double[] pickSample;

	// Per-channel write index into the current partially-filled output block.
	private int[] putSample;

	// Per-channel running total of output samples written; used to compute the
	// start sample of each emitted RawDataUnit. Lazily initialised in process()
	// since the first input sample number may be non-zero (see createFilters()).
	private long[] totalPutSamples;

	// Per-channel output block currently being filled.
	private double[][] outputData;

	// Per-channel millisecond timestamp of the start of the current output block.
	private long[] outputStartMillis;

	// Bitmap of channels this worker handles; other channels are ignored.
	private int channelMap;

	// Input (source) sample rate in Hz.
	private double inputRate;

	// Output (target) sample rate in Hz. May be higher than inputRate (upsampling).
	private double outputRate;

	// Per-channel interpolators used to read values at fractional sample positions.
	private Interpolator[] interpolators;

	// Filter / interpolation configuration for this worker.
	private DecimatorParams decimatorParams;

//	private DaqSourceInfo[] daqSourceInfos;

	/**
	 * Make a decimator worker with given filter params, channel map and input and output rates.
	 * Channels not in the map will be ignored.
	 * @param decimatorParams Filter parameters for Decimator filter
	 * @param channelMap Channel map (channels not in map will return null)
	 * @param inputRate input sample rate
	 * @param outputRate output sample rate
	 */
	public DecimatorWorker(DecimatorParams decimatorParams, int channelMap, double inputRate, double outputRate) {
		this.decimatorParams = decimatorParams;
		this.channelMap = channelMap;
		this.inputRate = inputRate;
		this.outputRate = outputRate;
		createFilters();
	}

	/**
	 * Make a decimator / upsampler with a default Butterworth low pass filter
	 * of the given order, cut off at half the lower of the two sample rates
	 * (the Nyquist frequency of the narrower band).
	 * @param filterOrder Filter order to use (Butterworth low pass applied before decimation or after upsampling)
	 * @param channelMap channel map
	 * @param inputRate input sample rate
	 * @param outputRate output sample rate
	 */
	public DecimatorWorker(int filterOrder, int channelMap, double inputRate, double outputRate) {
		this.channelMap = channelMap;
		this.inputRate = inputRate;
		this.outputRate = outputRate;
		decimatorParams = new DecimatorParams();
		decimatorParams.filterParams = new FilterParams();
		decimatorParams.filterParams.filterBand = FilterBand.LOWPASS;
		decimatorParams.filterParams.filterType = FilterType.BUTTERWORTH;
		decimatorParams.filterParams.filterOrder = filterOrder;
		// Cut off at the Nyquist frequency of whichever rate is lower, to avoid aliasing.
		decimatorParams.filterParams.lowPassFreq = (float) (Math.min(inputRate, outputRate) / 2.);
		createFilters();
	}

	/**
	 * Reset all counters and output buffers.
	 */
	public void reset() {
		// Re-creating the filters also re-allocates all per-channel state arrays,
		// so this discards any partially-filled output blocks and filter history.
		createFilters();
	}

	/**
	 * Make the decimator filters. If reducing frequency, then the filter
	 * is applied before decimation (obviously!) so is set up based on the
	 * input sample rate. If however the 'decimator' is being used to upsample,
	 * then filtering takes place AFTER the transfer of data to the output
	 * arrays, so filtering is set up based on the output sample rate.
	 */
	protected void createFilters() {
		int highestChan = PamUtils.getHighestChannel(channelMap);
		// All per-channel arrays are indexed directly by channel number, so they
		// are sized highestChan+1 even if lower channels are absent from the map.
		filters = new Filter[highestChan+1];
		filteredData = new double[highestChan+1][];
		outputData = new double[highestChan+1][];
		outputStartMillis = new long[highestChan+1];
		interpolators = new Interpolator[highestChan+1];
//		daqSourceInfos = new DaqSourceInfo[highestChan+1];
		// The filter is designed at the higher of the two rates: the input rate when
		// decimating (filter runs before the copy) or the output rate when upsampling
		// (filter runs after the copy) - see class comment.
		double fs = Math.max(inputRate, outputRate);
		for (int i = 0; i <= highestChan; i++) {
			if ((1<<i & channelMap) == 0) {
				continue; // channel not in map - leave its slots null.
			}
			FilterMethod filterMethod = FilterMethod.createFilterMethod(fs, decimatorParams.filterParams);
			filters[i] = filterMethod.createFilter(i);
			filters[i].prepareFilter();
			interpolators[i] = makeInterpolator(decimatorParams.interpolation);
		}
		pickSample = new double[highestChan+1];
		putSample = new int[highestChan+1];
		/**
		 * This is a funny one - if skipping the start of a file, then the
		 * first sample number may not be zero, but we won't know this until the
		 * first raw data arrive, so set this null and deal with it in process(RawDataUnit)
		 */
		totalPutSamples = null;//new long[highestChan+1];
	}

	/**
	 * Create an interpolator for reading sample values at fractional positions.
	 * @param order polynomial interpolation order (0, 1 or 2); anything else
	 * falls back to order 0 (nearest sample).
	 * @return a new interpolator of the requested order
	 */
	private Interpolator makeInterpolator(int order) {
		switch (order) {
		case 0:
			return new PolyInterpolator0();
		case 1:
			return new PolyInterpolator1();
		case 2:
			return new PolyInterpolator2();
		default:
			return new PolyInterpolator0();
		}
	}

//	long lastSampDur;
	/**
	 * Run the decimator on the input data, return null if it's not
	 * in the channel list. <br>Also sometimes null if decimation ratio is not an integer factor
	 * (i.e. when the input block did not complete an output block, so there is
	 * nothing to emit yet - partial data are carried over to the next call).
	 * @param inputData a single-channel raw data unit
	 * @return a new data unit or null
	 */
	public RawDataUnit process(RawDataUnit inputData) {
		if (totalPutSamples == null) {
			/**
			 * Have to do this here, since the sample number of the first data unit may be
			 * >> 0 if we're skipping the start of a file, so need to apply a scaled
			 * version of this offset to the output data to get correct sample numbers.
			 */
			long firstSample = (long) (inputData.getStartSample() * outputRate / inputRate);
			totalPutSamples = new long[putSample.length];
			Arrays.fill(totalPutSamples, firstSample);
		}
		RawDataUnit retUnit = null;
		int chanMap = inputData.getChannelBitmap();
		if ((chanMap & channelMap) == 0) {
			return null; // channel not handled by this worker.
		}
		int chan = PamUtils.getSingleChannel(chanMap);
		long nInputSamps = inputData.getSampleDuration();
		if (inputRate > outputRate) { // decimation
			// Anti-alias filter BEFORE picking samples; reuse the scratch buffer
			// unless the block size has changed.
			if (filteredData[chan] == null || filteredData[chan].length != nInputSamps) {
				filteredData[chan] = new double[(int) nInputSamps];
			}
			filters[chan].runFilter(inputData.getRawData(), filteredData[chan]);
		}
		else { // upsampling - filter later.
			// No copy: the interpolator only reads from this array.
			filteredData[chan] = inputData.getRawData();
		}
		/**
		 * When processing offline files, sample numbers can change, e.g. at end of file
		 * there will be a partially filled data unit, so size will get smaller, then
		 * if cursor moved, size will go back to default. So need to be able to handle
		 * varying block sizes.
		 */
		int nOutSamps;
		if (outputData[chan] == null) {
			/*
			 * number of output samples is rounded up so that one inputData always fits into
			 * one output decimated data. However, there will be rare occasions when this function returns null
			 *
			 */
			nOutSamps = (int) Math.ceil(inputData.getSampleDuration() * outputRate / inputRate);
			outputData[chan] = new double[nOutSamps];
			outputStartMillis[chan] = inputData.getTimeMilliseconds();
//			DaqSourceInfo si = inputData.getDaqSourceInfo();
//			if (si != null) {
//				daqSourceInfos[chan] = new DaqSourceInfo(si.getSourceName(), si.getSeconds());
//			}
		}
		else {
			// Keep the established output block size even if this input block is
			// a different length (see comment above about varying block sizes).
			nOutSamps = outputData[chan].length;
		}
		Interpolator interpolator = interpolators[chan];
		interpolator.setInputData(filteredData[chan]);
		try {
			// Walk the fractional pick cursor through the input block, writing one
			// interpolated value per output sample. The -.5 keeps the final pick
			// position within interpolation range of the last input sample.
			while (pickSample[chan] < (nInputSamps-.5)) {
//				outputData[chan][putSample[chan]++] = filteredData[chan][(int) Math.round(pickSample[chan])];
				outputData[chan][putSample[chan]++] = interpolator.getOutputValue(pickSample[chan]);
				totalPutSamples[chan]++;
				if (putSample[chan] == nOutSamps) {
					// Output block full - package it up and start a fresh block.
					retUnit = new RawDataUnit(outputStartMillis[chan], inputData.getChannelBitmap(), totalPutSamples[chan]-nOutSamps, nOutSamps);
					retUnit.setRawData(outputData[chan], true);
//					retUnit.setDaqSourceInfo(daqSourceInfos[chan]);
					outputData[chan] = new double[nOutSamps];
					// Next block starts partway through this input unit: offset its
					// timestamp by the elapsed input samples converted to ms.
					outputStartMillis[chan] = inputData.getTimeMilliseconds() + (long) (pickSample[chan] / inputRate * 1000.);
//					DaqSourceInfo ds = inputData.getDaqSourceInfo();
//					if (ds != null) {
//						daqSourceInfos[chan] = ds.copy(ds.getSeconds() + pickSample[chan] / inputRate);
//					}
					putSample[chan] = 0;
				}
				// Advance by the resampling ratio (input samples per output sample).
				pickSample[chan] += inputRate / outputRate;
			}
		}
		catch (Exception e) {
			// NOTE(review): swallowing here keeps a bad block from killing the stream,
			// but silently truncates output - consider logging more context.
			e.printStackTrace();
		}
		pickSample[chan] -= nInputSamps; // ready for the next one - should be > 0.
		/*
		 * If upsampling, need to run the upsample filter now
		 * (in place, on the completed output block - see class comment).
		 */
		if (retUnit != null && inputRate < outputRate) {
			filters[chan].runFilter(retUnit.getRawData());
		}
		return retUnit;
	}
}
Morty Proxy This is a proxified and sanitized view of the page, visit original site.