Commit 35f5cbaf authored by YuxinSHI

Merge branch 'master' into 'merge_master_server'

# Conflicts:
#   fdi/dataset/resources/BaseProduct.yml
parents 0343bfa1 a06a0d42
test dataset pal:
  script: make test1 test2
......@@ -6,6 +6,10 @@ Change Log
BETA0
=====
1.0rc 2020-08-17
----------------
YAML v1.0 with datasets and metadata description unified and multiple inheritance. Revision of header keywords and table scheme. Improvement of yaml2python and a custom module loader. ProductInfo becomes a module object. Runs with FSC products.
0.20 2020-08-04
----------------
YAML v0.6 switches version and schema. Multiple YAML files enabled by a -y semantic change. Remove version in serializableClassID.
......
......@@ -13,7 +13,6 @@ B_TEMPLATE = $(RESDIR)
py: $(PYDIR)/$(B_PY) $(PYDIR)/$(P_PY)
$(PYDIR)/$(P_PY): $(PYDIR)/yaml2python.py $(P_YAML) $(P_TEMPLATE)/$(PRODUCT).template $(PYDIR)/$(B_PY)
#echo 'class '$(PRODUCT)'(): pass' > $(PYDIR)/$(P_PY)
python3 -m fdi.dataset.yaml2python -y $(P_YAML) -t $(P_TEMPLATE) -o $(PYDIR)
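The recipe above regenerates the product classes from YAML. A hedged sketch of driving the same command from Python; the directory arguments are placeholders, only the module path and flags are taken from the recipe (the yamlupgrade target further down passes -u instead of -t/-o):

import subprocess, sys

# placeholders: point -y at the .yml schemas, -t at the template dir, -o at the output package
cmd = [sys.executable, '-m', 'fdi.dataset.yaml2python',
       '-y', 'fdi/dataset/resources',
       '-t', 'fdi/dataset/resources',
       '-o', 'fdi/dataset']
subprocess.run(cmd, check=True)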
......@@ -28,6 +27,8 @@ $(PYDIR)/$(B_PY): $(RESDIR)/$(B_INFO)
It must be manually integrated into $(PYDIR)/$(B_PY).
@ echo Re-run make after editing. Exiting... ; exit
yamlupgrade:
python3 -m fdi.dataset.yaml2python -y $(P_YAML) -u
.PHONY: runserver reqs install uninstall vtag FORCE \
......@@ -75,7 +76,7 @@ reqs:
VERSION = $(shell python -c "from setuptools_scm import get_version;print(get_version('.'))")
versiontag:
@ echo update _version.py and tag to $(VERSION)
@ echo version = \"$(VERSION)\" > fdi/_version.py
@ echo __version__ = \"$(VERSION)\" > fdi/_version.py
git tag $(VERSION)
TESTLOG = tests/log
......
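The versiontag recipe's two steps (ask setuptools_scm for the version, then write fdi/_version.py with the corrected __version__ name and tag) translate roughly into Python as follows; a sketch, assuming setuptools_scm is installed and the current directory is the repository root:

from setuptools_scm import get_version
import subprocess

version = get_version('.')                        # same call the Makefile shells out to
with open('fdi/_version.py', 'w') as f:
    f.write('__version__ = "%s"\n' % version)     # __version__, not version: the fix in this diff
subprocess.run(['git', 'tag', version], check=True)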
#https://git-scm.com/book/en/v2/Git-Basics-Getting-a-Git-Repository
git add LICENSE README.md CHANGELOG.md requirements.txt setup.py .gitignore noxfile.py Makefile .readthedocs.yml
git add LICENSE README.md CHANGELOG.md requirements.txt setup.py .gitignore noxfile.py Makefile .readthedocs.yml .gitlab-ci.yml
git add bin/reinstall bin/installpns bin/update bin/gitadd
git add resources
git add fdi/*.py
......
version = "0.20.dev9+gd2579e3.d20200804"
__version__ = "1.0rc1.dev0+g252e0d5.d20200817"
......@@ -53,63 +53,10 @@ class BaseProduct( AbstractComposite, Copyable, Serializable, EventSender):
p.meta['creator']=Parameter('bar')
assert p.meta['creator']==Parameter('bar')
BaseProduct class (level ALL) schema 0.6 inheriting None. Automatically generated from fdi/dataset/resources/BaseProduct.yml on 2020-08-03 12:19:57.186246.
BaseProduct class (level ALL) schema 1.0 inheriting [None]. Automatically generated from fdi/dataset/resources/BaseProduct.yml on 2020-08-17 17:03:44.479156.
"""
productInfo = {
'metadata': OrderedDict({
'description': {
'fits_keyword': 'DESCRIPT',
'data_type': 'string',
'description': 'Description of this product',
'unit': 'None',
'default': 'UNKOWN',
'valid': '',
},
'type': {
'fits_keyword': 'TYPE',
'data_type': 'string',
'description': 'Product Type identification. Fully qualified Python class name or CARD.',
'unit': 'None',
'default': 'BaseProduct',
'valid': '',
},
'creator': {
'fits_keyword': 'CREATOR',
'data_type': 'string',
'description': 'Generator of this product. Example name of institute, organization, person, software, special algorithm etc.',
'unit': 'None',
'default': 'UNKOWN',
'valid': '',
},
'creationDate': {
'fits_keyword': 'DATE',
'data_type': 'finetime',
'description': 'Creation date of this product',
'unit': 'None',
'default': '0',
'valid': '',
},
'rootCause': {
'fits_keyword': 'ROOTCAUS',
'data_type': 'string',
'description': 'Reason of this run of pipeline.',
'unit': 'None',
'default': 'UNKOWN',
'valid': '',
},
'version': {
'fits_keyword': 'VERSION',
'data_type': 'string',
'description': 'Version of product schema',
'unit': 'None',
'default': '0.4',
'valid': '',
},
}),
}
def __init__(self,
description = 'UNKOWN',
......@@ -120,6 +67,9 @@ class BaseProduct( AbstractComposite, Copyable, Serializable, EventSender):
version = '0.4',
**kwds):
global ProductInfo
self.pInfo = ProductInfo
if 'metasToBeInstalled' not in kwds:
# this class is being called directly
......@@ -142,18 +92,20 @@ class BaseProduct( AbstractComposite, Copyable, Serializable, EventSender):
# must be the first line to initiate meta and get description
super(BaseProduct, self).__init__(description=description, **kwds)
# print(self.productInfo['metadata'].keys())
# print(self.pInfo['metadata'].keys())
# print(metasToBeInstalled)
# print('# ' + self.meta.toString())
self.installMetas(mtbi=metasToBeInstalled)
self.history = History()
def installMetas(self, mtbi):
def installMetas(self, mtbi, prodInfo=None):
""" put parameters in group in product metadata, and updates productInfo. values in mtbi override those default ones in group.
"""
if prodInfo is None:
prodInfo = self.pInfo
for met, params in self.productInfo['metadata'].items():
for met, params in prodInfo['metadata'].items():
# pdb.set_trace() # description has been set by Annotatable.__init__
if met != 'description':
# type_ in mtbi (from __init__) changed to type
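A minimal, self-contained sketch of the idea behind installMetas as described above (not the fdi implementation): defaults come from the per-class ProductInfo, and any value handed to __init__ via mtbi overrides them.

from collections import OrderedDict

def install_metas_sketch(meta, prod_info, mtbi):
    """meta: target dict; prod_info: the class's ProductInfo; mtbi: keywords from __init__."""
    for name, descriptor in prod_info['metadata'].items():
        if name == 'description':
            # description is already handled by the base __init__
            continue
        # a value passed to __init__ (mtbi) overrides the schema default
        meta[name] = mtbi.get(name, descriptor['default'])
    return meta

prod_info = {'metadata': OrderedDict([('description', {'default': 'UNKOWN'}),
                                      ('creator', {'default': 'UNKOWN'}),
                                      ('version', {'default': '0.4'})])}
print(install_metas_sketch({}, prod_info, {'creator': 'me'}))
# {'creator': 'me', 'version': '0.4'}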
......@@ -178,9 +130,9 @@ class BaseProduct( AbstractComposite, Copyable, Serializable, EventSender):
read, and returns the values only.
"""
# print('getattribute ' + name)
if name not in ['productInfo', '_meta'] and hasattr(self, '_meta'):
if name not in ['pInfo', '_meta'] and hasattr(self, '_meta'):
# self.hasMeta():
if name in self.productInfo['metadata']:
if name in self.pInfo['metadata']:
# if meta does not exist, inherit Attributable
# before any class that access mandatory attributes
# print('aa ' + selftr(self.getMeta()[name]))
......@@ -193,7 +145,7 @@ class BaseProduct( AbstractComposite, Copyable, Serializable, EventSender):
value: must be a Parameter/NumericParameter (depending on whether the value is a Number) if this is normal metadata; a plain value if it is a mandatory / built-in attribute.
"""
if self.hasMeta():
met = self.productInfo['metadata']
met = self.pInfo['metadata']
if name in met.keys():
# a special attribute like 'description'. store in meta
m = self.getMeta()
......@@ -241,7 +193,7 @@ class BaseProduct( AbstractComposite, Copyable, Serializable, EventSender):
def __delattr__(self, name):
""" Refuses deletion of mandatory attributes
"""
if name in self.productInfo['metadata'].keys():
if name in self.pInfo['metadata'].keys():
logger.warn('Cannot delete Mandatory Attribute ' + name)
super(BaseProduct, self).__delattr__(name)
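A short usage sketch of the attribute/metadata bridge that __getattribute__, __setattr__ and __delattr__ implement, assuming an installed fdi; the set/assert lines repeat the example from the class docstring:

from fdi.dataset.baseproduct import BaseProduct
from fdi.dataset.metadata import Parameter

p = BaseProduct(description='example')
p.meta['creator'] = Parameter('bar')        # from the class docstring
assert p.meta['creator'] == Parameter('bar')
print(p.creator)                            # __getattribute__ returns the value only: 'bar'
p.creator = 'baz'                           # __setattr__ stores it back into meta
# del p.creator is refused: __delattr__ only logs a warning for mandatory attributes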
......@@ -270,7 +222,7 @@ class BaseProduct( AbstractComposite, Copyable, Serializable, EventSender):
''' meta and datasets only show names
'''
s = '{'
"""for lvar in self.productInfo['metadata'].keys():
"""for lvar in self.pInfo['metadata'].keys():
if hasattr(self, lvar):
s += '%s = %s, ' % (lvar, getattr(self, lvar))
"""
......@@ -296,8 +248,8 @@ class BaseProduct( AbstractComposite, Copyable, Serializable, EventSender):
("_sets", self._sets),
("history", self.history),
("listeners", self.listeners),
("classID", self.classID),
("version", self.version)]
("classID", self.classID)]
return ODict(ls)
......@@ -308,7 +260,7 @@ def addMandatoryProductAttrs(cls):
https://stackoverflow.com/a/2584050
https://stackoverflow.com/a/1355444
"""
for name in self.productInfo['metadata'].keys():
for name in self.pInfo['metadata'].keys():
def g(self):
return self._meta[name]
......@@ -331,3 +283,66 @@ def addMandatoryProductAttrs(cls):
# Product = addMandatoryProductAttrs(Product)
ProductInfo = {
'name': 'BaseProduct',
'description': 'FDI base class',
'parents': [
None,
],
'level': 'ALL',
'schema': '1.0',
'metadata': {
'description': {
'fits_keyword': 'DESCRIPT',
'data_type': 'string',
'description': 'Description of this product',
'unit': None,
'default': 'UNKOWN',
'valid': '',
},
'type': {
'fits_keyword': 'TYPE',
'data_type': 'string',
'description': 'Product Type identification. Fully qualified Python class name or CARD.',
'unit': None,
'default': 'BaseProduct',
'valid': '',
},
'creator': {
'fits_keyword': 'CREATOR',
'data_type': 'string',
'description': 'Generator of this product. Example name of institute, organization, person, software, special algorithm etc.',
'unit': None,
'default': 'UNKOWN',
'valid': '',
},
'creationDate': {
'fits_keyword': 'DATE',
'data_type': 'finetime',
'description': 'Creation date of this product',
'unit': 'None',
'default': '0',
'valid': '',
},
'rootCause': {
'fits_keyword': 'ROOTCAUS',
'data_type': 'string',
'description': 'Reason of this run of pipeline.',
'unit': None,
'default': 'UNKOWN',
'valid': '',
},
'version': {
'fits_keyword': 'VERSION',
'data_type': 'string',
'description': 'Version of product schema',
'unit': None,
'default': '0.4',
'valid': '',
},
},
'datasets': {
},
}
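addMandatoryProductAttrs (partly shown above) attaches a property per metadata keyword to the class, following the two Stack Overflow recipes it cites. A simplified, standalone sketch of that pattern, not the fdi code; note the per-iteration default argument to avoid late binding:

def add_meta_properties(cls, names):
    for name in names:
        def g(self, _n=name):                 # bind name now, not at call time
            return self._meta[_n]
        def s(self, value, _n=name):
            self._meta[_n] = value
        def d(self, _n=name):
            raise AttributeError('Cannot delete mandatory attribute ' + _n)
        setattr(cls, name, property(g, s, d, 'mandatory attribute ' + name))
    return cls

class Demo:
    def __init__(self):
        self._meta = {'creator': 'UNKOWN'}

add_meta_properties(Demo, ['creator'])
d = Demo()
d.creator = 'me'
print(d.creator)    # 'me'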
......@@ -3,11 +3,13 @@
import logging
import copy
import importlib
import traceback
import pdb
from .odict import ODict
from ..utils.common import trbk
from ..utils.moduleloader import SelectiveMetaFinder, installSelectiveMetaFinder
import sys
if sys.version_info[0] >= 3: # + 0.1 * sys.version_info[1] >= 3.3:
......@@ -61,61 +63,52 @@ class Classes_meta(type):
"""
super().__init__(*args, **kwds)
def updateMapping(cls, c={}):
def updateMapping(cls, c=None, rerun=False, exclude=[]):
""" Updates classes mapping.
Make the package mapping if it has not been made.
Set rerun=True to re-import the module-class list and update the mapping with it, with the specified modules excluded, before updating with c. If the module-class list has never been imported, it will be imported regardless of rerun.
"""
if len(cls._package) == 0:
cls.makePackageClasses()
#
cls.importModuleClasses(rerun=rerun, exclude=exclude)
# cls._classes.clear()
cls._classes.update(copy.copy(cls._package))
cls._classes.update(c)
if c:
cls._classes.update(c)
def makePackageClasses(cls, rerun=False, exclude=[]):
""" The set of fdi package-wide deserializable classes is maintained by hand.
Do nothing if the classes mapping is already made, so repeated calls will not cost much more time. Set rerun to True to force re-execution.
def importModuleClasses(cls, rerun=False, exclude=[]):
""" The set of eserializable classes in modclass is maintained by hand.
Do nothing if the classes mapping is already made so repeated calls will not cost lots more time. Set rerun to True to force re-import. If the module-class list has never been imported, it will be imported regardless rerun.
modules whose names (without '.') are in exclude are not imported.
"""
if len(cls._package) and not rerun:
return
"""
from fdi.dataset.deserialize import deserializeClassID
from fdi.dataset.finetime import FineTime, FineTime1, utcobj
from fdi.dataset.history import History
from fdi.dataset.baseproduct import BaseProduct
from fdi.dataset.product import Product
from fdi.dataset.datatypes import Vector, Quaternion
from fdi.dataset.metadata import Parameter, NumericParameter, MetaData
from fdi.dataset.dataset import GenericDataset, ArrayDataset, \
TableDataset, CompositeDataset, Column
from fdi.pal.context import Context, MapContext, RefContainer, \
ContextRuleException
from fdi.pal.urn import Urn
from fdi.pal.productref import ProductRef
cls._package.update(locals())
del cls._package['cls']
del cls._package['rerun']
"""
cls._package.clear()
SelectiveMetaFinder.exclude = exclude
# print('With %s excluded..' % (str(exclude)))
for modnm, froml in cls.modclass.items():
exed = [x for x in froml if x not in exclude]
if len(exed) == 0:
if any((x in exclude for x in modnm.split('.'))):
continue
# print('importing %s from %s' % (str(exed), modnm))
#print('importing %s from %s' % (str(froml), modnm))
try:
m = importlib.__import__(modnm, globals(), locals(), exed)
except Exception as e:
#print('Importing %s not successful. %s' % (str(exed), str(e)))
pass
#m = importlib.__import__(modnm, globals(), locals(), froml)
m = importlib.import_module(modnm)
except SelectiveMetaFinder.ExcludedModule as e:
logger.error('Importing %s not successful. %s' %
(str(froml), str(e)))
#ety, enm, tb = sys.exc_info()
else:
for n in exed:
for n in froml:
#print(n, m)
# print(dir(m))
cls._package[n] = getattr(m, n)
return
def reloadClasses(cls):
""" re-import classes in list. """
""" re-import classes in mapping list, which is supposed to be populated. """
for n, t in cls._classes.items():
mo = importlib.import_module(t.__module__)
importlib.reload(mo)
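The import loop above boils down to importlib.import_module plus getattr per listed class name, with excluded modules skipped. A simplified, runnable sketch; the real importModuleClasses additionally installs SelectiveMetaFinder so excluded modules cannot be imported indirectly:

import importlib

def build_registry(modclass, exclude=()):
    registry = {}
    for modnm, names in modclass.items():
        if any(part in exclude for part in modnm.split('.')):
            continue                          # module excluded by name component
        try:
            mod = importlib.import_module(modnm)
        except Exception as e:
            print('Importing %s failed: %s' % (modnm, e))
            continue
        for n in names:
            registry[n] = getattr(mod, n)     # class object keyed by class name
    return registry

print(build_registry({'collections': ['OrderedDict']}))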
......@@ -127,8 +120,7 @@ class Classes_meta(type):
def mapping(cls):
""" Returns the dictionary of classes allowed for deserialization, including the fdi built-ins and user added classes.
"""
if len(cls._classes) == 0:
cls.updateMapping()
return cls._classes
@mapping.setter
......@@ -138,6 +130,12 @@ class Classes_meta(type):
raise NotImplementedError('Use Classes.updateMapping(c).')
cls.updateMapping(c)
def get(cls, name):
""" returns class objects by name """
if len(cls._classes) == 0:
cls.updateMapping()
return cls._classes[name]
class Classes(metaclass=Classes_meta):
""" A dictionary of class names and their class objects that are allowed to be deserialized.
......@@ -150,3 +148,8 @@ class Classes(metaclass=Classes_meta):
"""
pass
globals()
# pdb.set_trace()
# Classes.importModuleClasses()
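Usage sketch of the lazier mapping, assuming an installed fdi: after this change, reading .mapping no longer builds the registry; the first get() does.

from fdi.dataset.classes import Classes

Product = Classes.get('Product')              # first call triggers updateMapping()
Classes.updateMapping({'MyAlias': Product})   # user classes layer on top of the built-ins
print('MyAlias' in Classes.mapping)           # True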
......@@ -6,6 +6,7 @@ import pdb
from .odict import ODict
from .classes import Classes
from ..utils.common import lls
import sys
if sys.version_info[0] >= 3: # + 0.1 * sys.version_info[1] >= 3.3:
......@@ -26,43 +27,6 @@ Serializable.
'''
def imakedesables():
""" makes a class dictionary for instanciation.
"""
from fdi.dataset.deserialize import deserializeClassID
from fdi.dataset.finetime import FineTime, FineTime1, utcobj
from fdi.dataset.baseproduct import History, BaseProduct
from fdi.dataset.product import Product
from fdi.dataset.datatypes import Vector, Quaternion
from fdi.dataset.metadata import Parameter, NumericParameter, MetaData
from fdi.dataset.dataset import GenericDataset, ArrayDataset, TableDataset, CompositeDataset, Column
try:
from svom.products.chart import ATC_VT_B, ATC_VT_R, FDC_VT_B, FDC_VT_R
except Exception as e:
logger.info(str(e))
pass
from fdi.pal.context import MapContext, RefContainer, ContextRuleException
from fdi.pal.urn import Urn
from fdi.pal.productref import ProductRef
l = locals()
# logger.debug(l)
return l
#desables = None
def lls(s, length=80):
""" length-limited string
"""
if len(s) <= length:
return str(s)
else:
return str(s[:length - 3]) + '...'
def constructSerializableClassID(obj, lgb=None, debug=False):
""" mh: reconstruct object from the output of jason.loads().
Recursively goes into nested class instances that are not
......
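The lls() helper removed here now comes from fdi.utils.common (see the new import near the top of this file); since its whole body is shown above, here it is in runnable form with a quick usage line:

def lls(s, length=80):
    """ length-limited string """
    if len(s) <= length:
        return str(s)
    return str(s[:length - 3]) + '...'

print(lls('a rather long line of text that will not fit', length=20))   # 'a rather long lin...'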
......@@ -34,6 +34,7 @@ ParameterTypes = {
'product': 'Product',
'vector': 'Vector',
'quaternion': 'Quaternion',
'null': 'None',
'': 'None'
}
......
......@@ -13,142 +13,154 @@ logger = logging.getLogger(__name__)
# logger.debug('level %d' % (logger.getEffectiveLevel()))
class Product(BaseProduct):
""" Product class (level ALL) schema 0.6 inheriting BaseProduct. Automatically generated from fdi/dataset/resources/Product.yml on 2020-08-03 12:20:26.013775.
class Product(BaseProduct,):
""" Product class (level ALL) schema 1.0 inheriting ['BaseProduct']. Automatically generated from fdi/dataset/resources/Product.yml on 2020-08-17 02:46:24.843643.
Generally a Product (inheriting BaseProduct) has project-wide attributes and can be extended to define a plethora of specialized products.
"""
productInfo = {
'metadata': OrderedDict({
'description': {
def __init__(self,
description = 'UNKOWN',
type_ = 'Product',
creator = 'UNKOWN',
creationDate = FineTime1(0),
rootCause = 'UNKOWN',
version = '0.5',
startDate = FineTime1(0),
endDate = FineTime1(0),
instrument = 'UNKOWN',
modelName = 'UNKOWN',
mission = '_AGS',
**kwds):
""" Initializes instances with more metadata as attributes, set to default values.
Put the description keyword argument here to allow both BaseProduct('foo') and description='foo'.
"""
if 'metasToBeInstalled' not in kwds:
# this class is being called directly
# list of local variables.
lvar = locals()
lvar.pop('self')
lvar.pop('__class__')
lvar.pop('kwds')
metasToBeInstalled = lvar
else:
# This class is being called probably from super() in a subclass
metasToBeInstalled = kwds['metasToBeInstalled']
del kwds['metasToBeInstalled']
# must be the first line to initiate meta and get description
super(Product , self).__init__(
metasToBeInstalled=metasToBeInstalled, **kwds)
global ProductInfo
self.pInfo = ProductInfo
super(Product , self).installMetas(
mtbi=metasToBeInstalled, prodInfo=ProductInfo)
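The generated __init__ above uses a keyword-capture pattern: snapshot locals(), drop self/__class__/kwds, and pass the rest up as metasToBeInstalled so that only the outermost class installs the metadata. A standalone sketch of that pattern with made-up class and keyword names:

class Base:
    def __init__(self, description='UNKOWN', **kwds):
        mtbi = kwds.pop('metasToBeInstalled', {'description': description})
        print('installing metadata defaults:', mtbi)

class Derived(Base):
    def __init__(self, description='UNKOWN', mode='nominal', **kwds):
        if 'metasToBeInstalled' not in kwds:
            # called directly: capture this call's keyword values
            lvar = locals()
            lvar.pop('self')
            lvar.pop('__class__', None)       # present because super() is used below
            lvar.pop('kwds')
            metasToBeInstalled = lvar
        else:
            # called from a subclass that already collected the keywords
            metasToBeInstalled = kwds.pop('metasToBeInstalled')
        super().__init__(metasToBeInstalled=metasToBeInstalled, **kwds)

Derived(mode='special')
# -> installing metadata defaults: {'description': 'UNKOWN', 'mode': 'special'}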
ProductInfo = {
'name': 'Product',
'description': 'Project level product',
'parents': [
'BaseProduct',
],
'level': 'ALL',
'schema': '1.0',
'metadata': {
'description': {
'fits_keyword': 'DESCRIPT',
'data_type': 'string',
'description': 'Description of this product',
'unit': 'None',
'unit': None,
'default': 'UNKOWN',
'valid': '',
},
'type': {
},
'type': {
'fits_keyword': 'TYPE',
'data_type': 'string',
'description': 'Product Type identification. Fully qualified Python class name or CARD.',
'unit': 'None',
'unit': None,
'default': 'Product',
'valid': '',
},
'creator': {
},
'creator': {
'fits_keyword': 'CREATOR',
'data_type': 'string',
'description': 'Generator of this product. Example name of institute, organization, person, software, special algorithm etc.',
'unit': 'None',
'unit': None,
'default': 'UNKOWN',
'valid': '',
},
'creationDate': {
},
'creationDate': {
'fits_keyword': 'DATE',
'data_type': 'finetime',
'description': 'Creation date of this product',
'unit': 'None',
'default': '0',
'valid': '',
},
'rootCause': {
},
'rootCause': {
'fits_keyword': 'ROOTCAUS',
'data_type': 'string',
'description': 'Reason of this run of pipeline.',
'unit': 'None',
'unit': None,
'default': 'UNKOWN',
'valid': '',
},
'version': {
},
'version': {
'fits_keyword': 'VERSION',
'data_type': 'string',
'description': 'Version of product schema',
'unit': 'None',
'unit': None,
'default': '0.5',
'valid': '',
},
'startDate': {
},
'startDate': {
'fits_keyword': 'DATE_OBS',
'data_type': 'finetime',
'description': 'Nominal start time of this product.',
'unit': 'None',
'default': '0',
'valid': '',
},
'endDate': {
},
'endDate': {
'fits_keyword': 'DATE_END',
'data_type': 'finetime',
'description': 'Nominal end time of this product.',
'unit': 'None',
'default': '0',
'valid': '',
},
'instrument': {
},
'instrument': {
'fits_keyword': 'INSTRUME',
'data_type': 'string',
'description': 'Instrument that generated data of this product',