Commit 04f3515f authored by M. Huang

merge develop

parents 6f4ace53 22a281eb
......@@ -13,7 +13,7 @@ except ImportError:
Model = {'metadata': {}}
from collections.abc import Sequence
from collections.abc import Sequence, Iterable
from collections import OrderedDict
import itertools
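The hunk above keeps the module's pattern of falling back to a stub when the generated data model cannot be imported. A generic, standalone sketch of that pattern (the module name here is made up for illustration):

    # optional-import fallback, as in the hunk above;
    # 'some_generated_datamodel' is a made-up module name
    try:
        from some_generated_datamodel import Model
    except ImportError:
        Model = {'metadata': {}}      # minimal stub so the module still imports

    MdpInfo = Model['metadata']       # metadata defaults to an empty mapping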
......@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
MdpInfo = Model['metadata']
class ArrayDataset(DataWrapper, GenericDataset, Sequence, Typed, Typecoded):
class ArrayDataset(GenericDataset, Iterable):
""" Special dataset that contains a single Array Data object.
mh: If omit the parameter names during instanciation, e.g. ArrayDataset(a, b, c), the assumed order is data, unit, description.
mh: contains a sequence which provides methods count(), index(), remove(), reverse().
......
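A rough usage sketch of the positional ordering and the sequence-style methods that the ArrayDataset docstring above describes; the import path and the data/unit/description attribute access are assumptions:

    # minimal sketch; import path and data/unit/description attributes assumed
    from fdi.dataset.arraydataset import ArrayDataset

    a = ArrayDataset([1, 2, 3], 'eV', 'three photon energies')  # data, unit, description
    print(a.data, a.unit, a.description)
    # sequence-style methods named in the docstring
    print(a.count(2))    # 1
    print(a.index(3))    # 2
    a.remove(1)
    a.reverse()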
# -*- coding: utf-8 -*-
from .odict import ODict
from .eq import DeepEqual
#from collections import OrderedDict
from collections.abc import Sized, Container, Iterator  # ABCs live in collections.abc on Python 3
from collections import OrderedDict
import logging
# create logger
logger = logging.getLogger(__name__)
......@@ -10,8 +10,10 @@ logger = logging.getLogger(__name__)
# from .listener import DatasetEventSender, DatasetListener
# from .metadata import DataWrapperMapper
# order of Container, Sized, Iterator must be the same as in DataContainer!
class Composite(DeepEqual):
class Composite(DeepEqual, Container, Sized, Iterator):
    """ A container of named Datasets.
    This container can hold zero or more datasets, each of them
......@@ -25,7 +27,7 @@ class Composite(DeepEqual):
    def __init__(self, **kwds):
        self._sets = ODict()
        super(Composite, self).__init__(**kwds)
        super().__init__(**kwds)

    def containsKey(self, name):
        """ Returns true if this map contains a mapping for
......
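A rough sketch of how the Composite container above might be used. containsKey() appears in this hunk; set() and the import paths are assumptions, and the `in`/`len()` lines assume the class supplies the __contains__/__len__ that the new Container and Sized bases call for:

    # minimal sketch; import paths and set() are assumptions
    from fdi.dataset.composite import Composite
    from fdi.dataset.arraydataset import ArrayDataset

    c = Composite()
    c.set('raw', ArrayDataset(data=[1, 2, 3]))   # store a dataset under a name
    print(c.containsKey('raw'))                  # True, method shown in this hunk
    # implied by the Container/Sized bases added in this commit,
    # assuming __contains__/__len__ are defined elsewhere in the class:
    print('raw' in c)
    print(len(c))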
......@@ -5,14 +5,14 @@ from .eq import DeepEqual
from .copyable import Copyable
from .annotatable import Annotatable
from collections.abc import Container
from collections.abc import Container, Sized
import logging
# create logger
logger = logging.getLogger(__name__)
#logger.debug('level %d' % (logger.getEffectiveLevel()))
class DataContainer(Annotatable, Quantifiable, Copyable, DeepEqual):
class DataContainer(Annotatable, Copyable, DeepEqual, Container, Sized):
""" A DataContainer is a composite of data and description.
mh: note that There is no metadata.
Implemented partly from AbstractDataWrapper.
......@@ -60,8 +60,24 @@ class DataContainer(Annotatable, Quantifiable, Copyable, DeepEqual):
""" Returns whether this data wrapper has data. """
return self.getData() is not None and len(self.getData()) > 0
def __contains__(self, x):
"""
"""
try:
return x in self._data
except AttributeError:
return False
def __len__(self, *args, **kwargs):
""" size of data
"""
try:
return self._data.__len__(*args, **kwargs)
except AttributeError:
return 0
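The two methods added above delegate to self._data and degrade gracefully when no data has been set yet. A standalone illustration of that delegate-with-fallback pattern (the class name is made up for the example):

    # HolderDemo is a made-up name, not a class from this repository
    class HolderDemo:
        def __init__(self, data=None):
            if data is not None:
                self._data = data          # _data may simply not exist yet

        def __contains__(self, x):
            try:
                return x in self._data
            except AttributeError:         # no data set
                return False

        def __len__(self):
            try:
                return len(self._data)
            except AttributeError:
                return 0

    print(3 in HolderDemo([1, 2, 3]))      # True
    print(len(HolderDemo()))               # 0, not an exception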
class DataWrapper(DataContainer):
class DataWrapper(DataContainer, Quantifiable):
""" A DataWrapper is a composite of data, unit and description.
mh: note that all data are in the same unit. There is no metadata.
Implemented from AbstractDataWrapper.
......
......@@ -38,19 +38,3 @@ class Quantifiable(Typecoded):
    def setUnit(self, unit):
        """ Sets the unit of this object. """
        self._unit = unit

    @property
    def typecode(self):
        return self.getTypecode()

    @typecode.setter
    def typecode(self, typecode):
        self.setTypecode(typecode)

    def getTypecode(self):
        """ Returns the typecode related to this object."""
        return self._typecode

    def setTypecode(self, typecode):
        """ Sets the typecode of this object. """
        self._typecode = typecode
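The typecode block removed above uses the same property-plus-getter/setter idiom as unit; presumably it can go because Quantifiable now inherits Typecoded, as the hunk header above shows. A generic sketch of the idiom with made-up names:

    # UnitDemo is a made-up class for illustration only
    class UnitDemo:
        def __init__(self, unit=None):
            self._unit = unit

        @property
        def unit(self):
            return self.getUnit()

        @unit.setter
        def unit(self, unit):
            self.setUnit(unit)

        def getUnit(self):
            """ Returns the unit related to this object. """
            return self._unit

        def setUnit(self, unit):
            """ Sets the unit of this object. """
            self._unit = unit

    q = UnitDemo('km')
    q.unit = 'm'           # routed through setUnit()
    print(q.getUnit())     # m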
# -*- coding: utf-8 -*-
from .typed import Typed
from .typecoded import Typecoded
from .listener import ColumnListener
from .indexed import Indexed
from .ndprint import ndprint
from .odict import ODict
import itertools
from ..utils.common import mstr, bstr, lls, exprstrs
from .dataset import GenericDataset
from .dataset import Dataset
try:
    from .arraydataset_datamodel import Model
except ImportError:
......@@ -77,10 +74,10 @@ class TableModel(object):
        self.getColumn(columnIndex).data[rowIndex] = value
MdpInfo = Model['metadata']
-MdpInfo = Model['metadata']
class TableDataset(GenericDataset, TableModel):
class TableDataset(Dataset, TableModel):
""" Special dataset that contains a single Array Data object.
A TableDataset is a tabular collection of Columns. It is optimized to work on array data..
The column-wise approach is convenient in many cases. For example, one has an event list, and each algorithm is adding a new field to the events (i.e. a new column, for example a quality mask).
......@@ -105,7 +102,6 @@ class TableDataset(GenericDataset, TableModel):
"""
    def __init__(self, data=None,
                 description=None,
                 zInfo=None,
                 alwaysMeta=True,
                 **kwds):
......
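A rough usage sketch of the column-wise layout the TableDataset docstring describes, mirroring test_TableDataset_init further down this page; the import locations and the column-assignment call are assumptions:

    # minimal sketch; where TableDataset and Column are imported from is assumed
    from fdi.dataset.tabledataset import TableDataset
    from fdi.dataset.dataset import Column

    t = TableDataset(data={'col1': Column(data=[1, 4.4, 5.4E3], unit='eV'),
                           'col2': Column(data=[0, 43.2, 2E3], unit='cnt')})
    print(t.getColumnCount())                       # 2
    print(t.getColumnName(0))                       # col1
    print(t.getValueAt(rowIndex=1, columnIndex=1))  # 43.2
    # the event-list case from the docstring: attach a new column to all rows;
    # the exact API for this (__setitem__ vs. an addColumn method) is assumed
    t['quality'] = Column(data=[1, 1, 0], unit='')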
......@@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)
class Typed(object):
""" An object of specifiable kinds. """
def __init__(self, typ_='', **kwds):
def __init__(self, typ_=None, **kwds):
""" Has a type.
"""
......
# -*- coding: utf-8 -*-
from collections.abc import Mapping, Sequence
......@@ -19,6 +20,8 @@ If the above fail and a method whose name starts with 'is' then the method is ca
    p0 = paths[0]
    found_meth = None
    print(p0)
    is_str = issubclass(p0.__class__, str)
    if is_str and hasattr(nested, p0):
        v = getattr(nested, p0)
......@@ -45,14 +48,12 @@ If the above fail and a method whose name starts with 'is' then the method is ca
    # can be converted to numerics
    p0 = list(num)
    try:
        if hasattr(nested, 'items') and (p0 in nested) or \
                hasattr(nested, '__iter__') and (p0 < len(list(nested))):
            v = nested[p0]
            q = '"' if issubclass(p0.__class__, str) else ''
            rep = re + '[' + q + str(p0) + q + ']'
            if len(paths) == 1:
                return v, rep
            return fetch(paths[1:], v, rep)
        v = nested[p0]
        q = '"' if issubclass(p0.__class__, str) else ''
        rep = re + '[' + q + str(p0) + q + ']'
        if len(paths) == 1:
            return v, rep
        return fetch(paths[1:], v, rep)
    except TypeError:
        pass
    # not attribute or member
......
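For orientation, the simplified branch above indexes the nested object with nested[p0], builds a printable representation in rep, and recurses on the remaining path segments. A hedged usage sketch; the import location, the default of the third argument, and the exact form of the returned representation are assumptions:

    # hedged example; fetch's signature and defaults are assumed from this hunk
    from fdi.utils.common import fetch

    nested = {'a': [10, 20, {'b': 'hit'}]}
    v, rep = fetch(['a', 2, 'b'], nested)
    print(v)      # hit
    print(rep)    # something like ["a"][2]["b"]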
......@@ -452,7 +452,6 @@ def test_EventSender(mocksndrlsnr):
    watcher.fileChanged.removeListener(l2)
    watcher.watchFiles()
    assert test123 == "'foo' changed."
    # __import__('pdb').set_trace()
def test_MqttRelay_mqtt(mocksndrlsnr):
......@@ -1420,6 +1419,7 @@ def test_ArrayDataset_array_func():
def test_Column():
    v = Column(data=[4, 9], unit='m')
    assert v.data == [4, 9]
    s = v.hash()
    checkjson(v)
......@@ -1436,17 +1436,18 @@ def test_TableDataset_init():
    t = 5
    t = TableDataset(data=42)
    with pytest.raises(DeprecationWarning):
        t = TableDataset(data=[{'name': 'a', 'column': Column(data=[])}])
    # setData format 1: data is a mapping. Needs python 3.6 to guarantee order
    a1 = {'col1': Column(data=[1, 4.4, 5.4E3], unit='eV'),
          'col2': Column(data=[0, 43.2, 2E3], unit='cnt')}
    v = TableDataset(data=a1)  # inherited from DataWrapper
    assert v.getColumnCount() == len(a1)
    assert v.getColumnName(0) == 'col1'
    t = a1['col2'].data[1]  # 43.2
    assert v.getValueAt(rowIndex=1, columnIndex=1) == t
    if 1:
        with pytest.raises(DeprecationWarning):
            t = TableDataset(data=[{'name': 'a', 'column': Column(data=[])}])
        # setData format 1: data is a mapping. Needs python 3.6 to guarantee order
        a1 = {'col1': Column(data=[1, 4.4, 5.4E3], unit='eV'),
              'col2': Column(data=[0, 43.2, 2E3], unit='cnt')}
        v = TableDataset(data=a1)  # inherited from DataContainer
        assert v.getColumnCount() == len(a1)
        assert v.getColumnName(0) == 'col1'
        t = a1['col2'].data[1]  # 43.2
        assert v.getValueAt(rowIndex=1, columnIndex=1) == t
    # 2: add columns one by one
    v2 = TableDataset()
......@@ -1826,7 +1827,6 @@ def test_CompositeDataset_init():
    a12 = NumericParameter(description='a different param in metadata',
                           value=2.3, unit='sec')
    v.meta[a11] = a12
# def test_CompositeDataset_func():
# equality
......