
Merge branch 'schema_2.2.1'
Lawrence committed Mar 4, 2020
2 parents 4998690 + a55cd19 commit 929ba2f
Showing 111 changed files with 3,571 additions and 889 deletions.
4 changes: 4 additions & 0 deletions +file/Dataset.m
@@ -75,6 +75,10 @@
end
end

if source.isKey('required')
obj.required = strcmp(source('required'), 'true');
end

obj.isConstrainedSet = ~isempty(obj.type) && ~obj.scalar;

boundsKey = 'dims';
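
For context, file.Dataset now reads the 'required' key from the parsed schema source (a containers.Map). A minimal sketch with a hypothetical, illustrative map entry:

    % Hypothetical parsed dataset spec; keys and values are illustrative only
    source = containers.Map(...
        {'name', 'dtype', 'required'},...
        {'resolution', 'float32', 'false'});
    if source.isKey('required')
        required = strcmp(source('required'), 'true');  % false for this entry
    end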
8 changes: 7 additions & 1 deletion +file/fillClass.m
@@ -54,9 +54,15 @@
depnm = namespace.getFullClassName(parentName);
end

if isa(processed, 'file.Group')
classTag = 'types.untyped.GroupClass';
else
classTag = 'types.untyped.DatasetClass';
end

%% return classfile string
classDef = [...
'classdef ' name ' < ' depnm newline... %header, dependencies
'classdef ' name ' < ' depnm ' & ' classTag newline... %header, dependencies
'% ' upper(name) ' ' class.doc]; %name, docstr
propgroups = {...
@()file.fillProps(classprops, readonly, 'SetAccess=protected')...
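
The effect of the new classTag is that every generated class also inherits an HDF5 marker class, so the emitted header gains one more superclass. Roughly, with hypothetical names:

    name = 'ExampleSeries';                 % hypothetical generated type name
    depnm = 'types.core.NWBDataInterface';  % hypothetical parent dependency
    classTag = 'types.untyped.GroupClass';  % processed is a file.Group here
    header = ['classdef ' name ' < ' depnm ' & ' classTag];
    % header == 'classdef ExampleSeries < types.core.NWBDataInterface & types.untyped.GroupClass'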
4 changes: 2 additions & 2 deletions +file/fillExport.m
@@ -18,15 +18,15 @@
bodystr = [bodystr {'fullpath = '''';'}];
end

for i=1:length(propnames)
for i = 1:length(propnames)
pnm = propnames{i};
pathProps = traverseRaw(pnm, raw);
prop = pathProps{end};
elideProps = pathProps(1:end-1);

%Construct elisions
elisions = cell(length(elideProps),1);
for j=1:length(elideProps)
for j = 1:length(elideProps)
elisions{j} = elideProps{j}.name;
end

12 changes: 6 additions & 6 deletions +tests/+system/DynamicTableTest.m
@@ -1,20 +1,20 @@
classdef DynamicTableTest < tests.system.RoundTripTest & tests.system.AmendTest
methods
function addContainer(~, file)
start_time = types.core.VectorData(...
start_time = types.hdmf_common.VectorData(...
'description', 'start_time',...
'data', 1:100);
stop_time = types.core.VectorData(...
stop_time = types.hdmf_common.VectorData(...
'description', 'stop_time',...
'data', 2:2:200);
colnames = {'start_time', 'stop_time', 'randomvalues'};
id = types.core.ElementIdentifiers(...
id = types.hdmf_common.ElementIdentifiers(...
'data', 1:100);

randcol = types.core.VectorData(...
randcol = types.hdmf_common.VectorData(...
'description', 'random data to be indexed into',...
'data', rand(500,1));
randidx = types.core.VectorIndex(...
randidx = types.hdmf_common.VectorIndex(...
'target', types.untyped.ObjectView('/intervals/trials/randomvalues'),...
'data', 5:5:500 - 1);

@@ -37,7 +37,7 @@ function appendContainer(testCase, file)
container.data = rand(500, 1); % new random values.
file.intervals_trials.colnames{end+1} = 'newcolumn';
file.intervals_trials.vectordata.set('newcolumn',...
types.core.VectorData(...
types.hdmf_common.VectorData(...
'description', 'newly added column',...
'data', 100:-1:1));
end
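
As the hunk above shows, the table-related types (VectorData, VectorIndex, ElementIdentifiers) now live in the hdmf_common namespace rather than core. User code that builds DynamicTable columns follows the same rename; a minimal sketch with illustrative values:

    % Before: types.core.VectorData / types.core.ElementIdentifiers
    % After:  types.hdmf_common.VectorData / types.hdmf_common.ElementIdentifiers
    col = types.hdmf_common.VectorData(...
        'description', 'an example column',...
        'data', (1:10) .');
    id = types.hdmf_common.ElementIdentifiers('data', (1:10) .');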
22 changes: 11 additions & 11 deletions +tests/+system/ElectricalSeriesIOTest.m
@@ -17,24 +17,24 @@ function addContainer(testCase, file) %#ok<INUSL>
'location', 'tetrode location', ...
'device', devlink);

ettable = types.core.DynamicTable(...
ettable = types.hdmf_common.DynamicTable(...
'colnames', {'x', 'y', 'z', 'imp', 'location', 'filtering', 'group', 'group_name'},...
'id', types.core.ElementIdentifiers('data', 1:4),...
'x', types.core.VectorData('data', ones(4,1),...
'id', types.hdmf_common.ElementIdentifiers('data', 1:4),...
'x', types.hdmf_common.VectorData('data', ones(4,1),...
'description', 'the x coordinate of the channel location'),...
'y', types.core.VectorData('data', repmat(2, 4, 1),...
'y', types.hdmf_common.VectorData('data', repmat(2, 4, 1),...
'description', 'the y coordinate of the channel location'),...
'z', types.core.VectorData('data', repmat(3, 4, 1),...
'z', types.hdmf_common.VectorData('data', repmat(3, 4, 1),...
'description', 'the z coordinate of the channel location'),...
'imp', types.core.VectorData('data', ones(4,1),...
'imp', types.hdmf_common.VectorData('data', ones(4,1),...
'description', 'the impedance of the channel'),...
'location', types.core.VectorData('data', repmat({'CA1'},4,1),...
'location', types.hdmf_common.VectorData('data', repmat({'CA1'},4,1),...
'description', 'the location of channel within the subject e.g. brain region'),...
'filtering', types.core.VectorData('data', repmat({'none'},4,1),...
'filtering', types.hdmf_common.VectorData('data', repmat({'none'},4,1),...
'description', 'description of hardware filtering'),...
'group', types.core.VectorData('data', repmat(eglink,4,1),...
'group', types.hdmf_common.VectorData('data', repmat(eglink,4,1),...
'description', 'a reference to the ElectrodeGroup this electrode is a part of'),...
'group_name', types.core.VectorData('data', repmat({egnm},4,1),...
'group_name', types.hdmf_common.VectorData('data', repmat({egnm},4,1),...
'description', 'name of the ElectrodeGroup this electrode is a part of'),...
'description', 'electrodes'...
);
@@ -45,7 +45,7 @@ function addContainer(testCase, file) %#ok<INUSL>
'data', [0:9;10:19], ...
'timestamps', (0:9) .', ...
'electrodes', ...
types.core.DynamicTableRegion(...
types.hdmf_common.DynamicTableRegion(...
'data', [0;2],...
'table', etReg,...
'description', 'the first and third electrodes'));
3 changes: 2 additions & 1 deletion +tests/+system/ImagingPlaneIOTest.m
@@ -12,7 +12,8 @@ function addContainer(testCase, file) %#ok<INUSL>
'excitation_lambda', 6.28, ...
'imaging_rate', 2.718, ...
'indicator', 'GFP', ...
'location', 'somewhere in the brain');
'location', 'somewhere in the brain',...
'grid_spacing', []);
file.general_devices.set('imaging_device_1', dev);
file.general_optophysiology.set('imgpln1', ip);
end
10 changes: 7 additions & 3 deletions +tests/+system/NwbTestInterface.m
@@ -16,8 +16,11 @@ function setupClass(testCase)
methods (TestMethodSetup)
function setupMethod(testCase)
testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture);
generateCore(fullfile(testCase.root, ...
'nwb-schema', 'core', 'nwb.namespace.yaml'));
schemaPath = fullfile(testCase.root, 'nwb-schema');

generateCore(...
fullfile(schemaPath, 'hdmf-common-schema', 'common', 'namespace.yaml'),...
fullfile(schemaPath, 'core', 'nwb.namespace.yaml'));
testCase.file = NwbFile( ...
'session_description', 'a test NWB File', ...
'identifier', 'TEST123', ...
@@ -45,7 +48,8 @@ function verifyContainerEqual(testCase, actual, expected)
val1 = actual.(prop);
val2 = expected.(prop);
failmsg = ['Values for property ''' prop ''' are not equal'];
if startsWith(class(val1), 'types.core.')
if startsWith(class(val1), 'types.')...
&& ~startsWith(class(val1), 'types.untyped')
verifyContainerEqual(testCase, val1, val2);
elseif isa(val1, 'types.untyped.Set')
verifySetEqual(testCase, val1, val2, failmsg);
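
Because the hdmf-common types now come from their own namespace, class generation needs both namespace files. Outside the test fixture, a call mirroring the hunk above might look like this (paths relative to the matnwb root):

    generateCore(...
        fullfile('nwb-schema', 'hdmf-common-schema', 'common', 'namespace.yaml'),...
        fullfile('nwb-schema', 'core', 'nwb.namespace.yaml'));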
19 changes: 10 additions & 9 deletions +tests/+system/PyNWBIOTest.py
@@ -6,10 +6,11 @@
import numpy.testing as npt
import h5py

from pynwb import NWBContainer, get_manager, NWBFile, NWBData, TimeSeries
from pynwb import get_manager, NWBFile, TimeSeries
from pynwb.ecephys import ElectricalSeries, Clustering
from pynwb.ophys import OpticalChannel, TwoPhotonSeries
from hdmf.backends.hdf5 import HDF5IO
from hdmf.container import Container, Data

class PyNWBIOTest(unittest.TestCase):
def setUp(self):
@@ -62,10 +63,10 @@ def assertContainerEqual(self, container1, container2): # noqa: C901
f2 = getattr(container2, nwbfield)
if isinstance(f1, (tuple, list, np.ndarray)):
if len(f1) > 0:
if isinstance(f1[0], NWBContainer):
if isinstance(f1[0], Container):
for sub1, sub2 in zip(f1, f2):
self.assertContainerEqual(sub1, sub2)
elif isinstance(f1[0], NWBData):
elif isinstance(f1[0], Data):
for sub1, sub2 in zip(f1, f2):
self.assertDataEqual(sub1, sub2)
continue
@@ -78,21 +79,21 @@ def assertContainerEqual(self, container1, container2): # noqa: C901
self.assertAlmostEqual(v1, v2, places=6)
else:
self.assertTrue(np.array_equal(f1, f2))
elif isinstance(f1, dict) and len(f1) and isinstance(next(iter(f1.values())), NWBContainer):
elif isinstance(f1, dict) and len(f1) and isinstance(next(iter(f1.values())), Container):
f1_keys = set(f1.keys())
f2_keys = set(f2.keys())
self.assertSetEqual(f1_keys, f2_keys)
for k in f1_keys:
with self.subTest(module_name=k):
self.assertContainerEqual(f1[k], f2[k])
elif isinstance(f1, NWBContainer):
elif isinstance(f1, Container):
self.assertContainerEqual(f1, f2)
elif isinstance(f1, NWBData) or isinstance(f2, NWBData):
if isinstance(f1, NWBData) and isinstance(f2, NWBData):
elif isinstance(f1, Data) or isinstance(f2, Data):
if isinstance(f1, Data) and isinstance(f2, Data):
self.assertDataEqual(f1, f2)
elif isinstance(f1, NWBData):
elif isinstance(f1, Data):
self.assertTrue(np.array_equal(f1.data, f2))
elif isinstance(f2, NWBData):
elif isinstance(f2, Data):
self.assertTrue(np.array_equal(f1.data, f2))
else:
if isinstance(f1, (float, np.float32, np.float16, h5py.Dataset)):
6 changes: 3 additions & 3 deletions +tests/+system/UnitTimesIOTest.m
@@ -2,17 +2,17 @@
methods
function addContainer(~, file)
vdata = rand(10,1);
vd = types.core.VectorData('data', vdata, 'description', 'descr');
vd = types.hdmf_common.VectorData('data', vdata, 'description', 'descr');

spike_loc = '/units/spike_times';
vd_ref = [...
types.untyped.RegionView(spike_loc, 1),...
types.untyped.RegionView(spike_loc, 2:5),...
types.untyped.RegionView(spike_loc, 9:10)...
];
vi = types.core.VectorIndex('data', vd_ref,...
vi = types.hdmf_common.VectorIndex('data', vd_ref,...
'target', types.untyped.ObjectView(spike_loc));
ei = types.core.ElementIdentifiers('data', 1:3);
ei = types.hdmf_common.ElementIdentifiers('data', 1:3);
file.units = types.core.Units(...
'colnames', {'spike_times'},...
'description', 'test Units',...
6 changes: 3 additions & 3 deletions +tests/+system/smokeTest.m
@@ -35,10 +35,10 @@ function testSmokeInstantiateCore(testCase)
function testSmokeReadWrite(testCase)
epochs = types.core.TimeIntervals(...
'colnames', {'id' 'start_time' 'stop_time'} .',...
'id', types.core.ElementIdentifiers('data', 1),...
'id', types.hdmf_common.ElementIdentifiers('data', 1),...
'description', 'test TimeIntervals',...
'start_time', types.core.VectorData('data', 0, 'description', 'start time'),...
'stop_time', types.core.VectorData('data', 1, 'description', 'stop time'));
'start_time', types.hdmf_common.VectorData('data', 0, 'description', 'start time'),...
'stop_time', types.hdmf_common.VectorData('data', 1, 'description', 'stop time'));
file = NwbFile('identifier', 'st', 'session_description', 'smokeTest', ...
'session_start_time', datetime, 'intervals_epochs', epochs,...
'timestamps_reference_time', datetime);
5 changes: 5 additions & 0 deletions +types/+untyped/DatasetClass.m
@@ -0,0 +1,5 @@
classdef DatasetClass < handle
%DATASETCLASS Is an HDF5 dataset class
% Used by MetaClass to identify HDF5 class type.
end

5 changes: 5 additions & 0 deletions +types/+untyped/GroupClass.m
@@ -0,0 +1,5 @@
classdef GroupClass < handle
%GROUPCLASS Is an HDF5 Group class
% Used by MetaClass to identify HDF5 class type.
end
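
These two empty handle classes act as markers: generated types inherit one of them (see the fillClass.m hunk above), so writing code can branch on the HDF5 object kind with isa. A minimal sketch, assuming the regenerated hdmf-common classes are on the path:

    vd = types.hdmf_common.VectorData('data', 1:10, 'description', 'demo');
    isa(vd, 'types.untyped.DatasetClass')   % true:  exported as an HDF5 dataset
    isa(vd, 'types.untyped.GroupClass')     % false: would be exported as an HDF5 group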

74 changes: 41 additions & 33 deletions +types/+untyped/MetaClass.m
@@ -2,14 +2,46 @@
methods
function obj = MetaClass(varargin)
end

end

methods (Access = private)
function refs = write_base(obj, fid, fullpath, refs)
if isa(obj, 'types.untyped.GroupClass')
io.writeGroup(fid, fullpath);
return;
end

try
if isa(obj.data, 'types.untyped.DataStub')
refs = obj.data.export(fid, fullpath, refs);
elseif istable(obj.data) || isstruct(obj.data) ||...
isa(obj.data, 'containers.Map')
io.writeCompound(fid, fullpath, obj.data);
else
io.writeDataset(fid, fullpath, obj.data, 'forceArray');
end
catch ME
if strcmp(ME.stack(2).name, 'getRefData') && ...
endsWith(ME.stack(1).file, ...
fullfile({'+H5D','+H5R'}, {'open.m', 'create.m'}))
refs(end+1) = {fullpath};
return;
else
rethrow(ME);
end
end
end
end

methods
function refs = export(obj, fid, fullpath, refs)
%find reference properties
propnames = properties(obj);
props = cell(size(propnames));
for i=1:length(propnames)
props{i} = obj.(propnames{i});
end

refProps = cellfun('isclass', props, 'types.untyped.ObjectView') |...
cellfun('isclass', props, 'types.untyped.RegionView');
props = props(refProps);
@@ -27,49 +59,25 @@
end
end
end


if isa(obj, 'types.core.NWBContainer')
io.writeGroup(fid, fullpath);
elseif isa(obj, 'types.core.NWBData') || isa(obj, 'types.core.SpecFile')
try
if isa(obj.data, 'types.untyped.DataStub')
refs = obj.data.export(fid, fullpath, refs);
elseif istable(obj.data) || isstruct(obj.data) ||...
isa(obj.data, 'containers.Map')
io.writeCompound(fid, fullpath, obj.data);
else
io.writeDataset(fid, fullpath, obj.data, 'forceArray');
end
catch ME
if strcmp(ME.stack(2).name, 'getRefData') && ...
endsWith(ME.stack(1).file, ...
fullfile({'+H5D','+H5R'}, {'open.m', 'create.m'}))
refs(end+1) = {fullpath};
return;
else
rethrow(ME);
end
end
end

refs = obj.write_base(fid, fullpath, refs);

uuid = char(java.util.UUID.randomUUID().toString());
if isa(obj, 'NwbFile')
io.writeAttribute(fid,'/namespace', 'core');
io.writeAttribute(fid,'/neurodata_type', 'NWBFile');
io.writeAttribute(fid, '/namespace', 'core');
io.writeAttribute(fid, '/neurodata_type', 'NWBFile');
io.writeAttribute(fid, '/object_id', uuid);
else
namespacePath = [fullpath '/namespace'];
neuroTypePath = [fullpath '/neurodata_type'];
uuidPath = [fullpath '/object_id'];
dotparts = split(class(obj), '.');
namespace = dotparts{2};
namespace = strrep(dotparts{2}, '_', '-');
classtype = dotparts{3};
io.writeAttribute(fid, namespacePath, namespace);
io.writeAttribute(fid, neuroTypePath, classtype);
io.writeAttribute(fid, uuidPath, uuid);
end

% UUID
uuid = char(java.util.UUID.randomUUID().toString());
io.writeAttribute(fid, [fullpath '/object_id'], uuid);
end

function obj = loadAll(obj)
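
One subtle fix above: the namespace attribute is now derived by converting the underscore in the MATLAB package name back to the dash used by the schema. Roughly:

    dotparts = split('types.hdmf_common.VectorData', '.');
    namespace = strrep(dotparts{2}, '_', '-');  % 'hdmf-common' (previously written as 'hdmf_common')
    classtype = dotparts{3};                    % 'VectorData'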
6 changes: 3 additions & 3 deletions +util/create_indexed_column.m
@@ -37,13 +37,13 @@
end

if exist('table', 'var')
data_vector = types.core.DynamicTableRegion('table', table, ...
data_vector = types.hdmf_common.DynamicTableRegion('table', table, ...
'description', description, 'data', data);
else
data_vector = types.core.VectorData('data', data, 'description', description);
data_vector = types.hdmf_common.VectorData('data', data, 'description', description);
end

ov = types.untyped.ObjectView(path);
data_index = types.core.VectorIndex('data', bounds, 'target', ov);
data_index = types.hdmf_common.VectorIndex('data', bounds, 'target', ov);
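
For reference, a hedged usage sketch of util.create_indexed_column after this change; the argument list and output order below are illustrative, so check the function help for the exact signature:

    data = {[0.1, 0.21, 0.5], [0.3, 0.4]};   % two hypothetical ragged rows
    [data_vector, data_index] = util.create_indexed_column(...
        data, '/units/spike_times', 'spike times per unit');
    class(data_vector)   % types.hdmf_common.VectorData (a DynamicTableRegion when a table is supplied)
    class(data_index)    % types.hdmf_common.VectorIndex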

