diff --git a/acme.ini.default b/acme.ini.default index 80f18903..151d4734 100644 --- a/acme.ini.default +++ b/acme.ini.default @@ -132,7 +132,8 @@ size=200 [http] ; Port to listen to. Default: 8080 port=${basic.config:httpPort} -; Interface to listen to. Use 0.0.0.0 for "all" interfaces. Default:127.0.0.1 +; Interface to listen to. Use 0.0.0.0 for "all" interfaces. +; Default:0.0.0.0 listenIF=${basic.config:networkInterface} ; Own address. Should be a local/public reachable address. ; Default: http://127.0.0.1:8080 @@ -182,6 +183,22 @@ verifyCertificate=false caCertificateFile=${basic.config:dataDirectory}/certs/acme_cert.pem ; Path and filename of the private key file. Default: None caPrivateKeyFile=${basic.config:dataDirectory}/certs/acme_key.pem +; Enable basic authentication for the HTTP binding. +; Default: false +enableBasicAuth=false +; Enable token authentication for the HTTP binding. +; Default: false +enableTokenAuth=false +; Path and filename of the http basic authentication file. +; The file must contain lines with the format "username:password". +; Comments are lines starting with a #. +; Default: certs/http_basic_auth.txt +basicAuthFile=${basic.config:dataDirectory}/certs/http_basic_auth.txt +; Path and filename of the http bearer token authentication file. +; The file must contain lines with the format "token". +; Comments are lines starting with a #. +; Default: certs/http_token_auth.txt +tokenAuthFile=${basic.config:dataDirectory}/certs/http_token_auth.txt [http.cors] @@ -193,6 +210,20 @@ enable=false resources=/* +[http.wsgi] +; Enable WSGI support for the HTTP binding. +; Default: false +enable=false +; The number of threads used to process requests. +; This number should be of similar size as the "connectionLimit" setting. +; Default: 100 +threadPoolSize=100 +; The number of possible parallel connections that can be accepted by the WSGI server. +; One connection uses one system file descriptor. +; Default: 100 +connectionLimit=100 + + ; ; MQTT client settings ; @@ -211,7 +242,7 @@ port=1883 ; Default: 60 seconds keepalive=60 ; Interface to listen to. Use 0.0.0.0 for "all" interfaces. -; Default: 127.0.0.1 +; Default: 0.0.0.0 listenIF=${basic.config:networkInterface} ; Optional prefix for topics. ; Default: empty string @@ -248,6 +279,41 @@ caCertificateFile=${basic.config:dataDirectory}/certs/m2mqtt_ca.crt allowedCredentialIDs= +; +; CoAP client settings +; + +[coap] +; Enable the CoAP binding. +; Default: false +enable=false +serverPort=5683 +; Interface to listen to. Use 0.0.0.0 for "all" interfaces. +; Default: +listenIF=${basic.config:networkInterface} + + +; +; CoAP security settings +; + +[coap.security] +; Enable DTLS for communications with the CoAP server. +; Default: False +useDTLS=false +; TLS version to be used in connections. +; Allowed versions: TLS1.1, TLS1.2, auto . Use "auto" to allow client-server certificate +; version negotiation. +; Default: auto +dtlsVersion=auto +; Verify certificates in requests. Set to False when using self-signed certificates. +; Default: False +verifyCertificate=False +; Path and filename of the certificate file. Default: ${basic.config:dataDirectory}/certs/coap_cert.pem +certificateFile=${basic.config:dataDirectory}/certs/coap_cert.pem +; Path and filename of the private key file. Default: None +privateKeyFile=${basic.config:dataDirectory}/certs/coap_key.pem + ; ; Database settings ; @@ -314,19 +380,30 @@ excludeCSRAttributes= ; [logging] -; Enable logging to file. Default: False +; Enable logging to file. 
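As an aside on the new [http] authentication settings above: the documented basicAuthFile format is one "username:password" entry per line, with '#' starting a comment line. A minimal, hypothetical reader for such a file could look like the following sketch (illustration only, not the ACME CSE's actual parser):

def readBasicAuthFile(path:str) -> dict[str, str]:
	""" Hypothetical helper: read a basic-auth file in the documented
		"username:password" format. Lines starting with '#' are comments. """
	credentials:dict[str, str] = {}
	with open(path, 'r', encoding = 'utf-8') as f:
		for line in f:
			line = line.strip()
			if not line or line.startswith('#'):
				continue  # skip empty lines and comments
			username, _, password = line.partition(':')
			credentials[username] = password
	return credentials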
+; Default: False enableFileLogging=False -; Enable logging to the screen. Default: True +; Enable logging to the screen. +; Default: True enableScreenLogging=true -; Path to the log files. Default: ./logs +; Path to the log files. +; Default: ./logs path=${basic.config:dataDirectory}/logs -; Loglevel. Allowed values: debug, info, warning, error, off. Default: debug +; Loglevel. Allowed values: debug, info, warning, error, off. +; Default: debug level=${basic.config:logLevel} -; Number of files for log rotation. Default: 10 +; Number of files for log rotation. +; Default: 10 count=10 -; Size per log file. Default: 100.000 bytes +; Size per log file. +; Default: 100.000 bytes size=100000 -; Print a stack trace when logging an 'error' level message. Default: True +; Maximum length of a log message. Longer messages will be truncated. +; A value of 0 means no truncation. +; Default: 1000 characters +maxLogMessageLength=1000 +; Print a stack trace when logging an 'error' level message. +; Default: True stackTraceOnError=False ; Enable logging of low-level HTTP & MQTT client events. ; Default: False @@ -339,6 +416,7 @@ queueSize=5000 filter=werkzeug,markdown_it,asyncio + ; ; Settings for resource announcements ; @@ -405,6 +483,43 @@ mni=10 mbs=10000 +[resource.grp] +; Set the time for aggregating the results of a group request before interrupting. +; The format is the time in ms. A value of 0 ms means no timeout. +; Default: 0 ms +resultExpirationTime=0 + +; +; Resource defaults: LocationPolicy +; + +[resource.lcp] +; Default for maxNrOfInstances for the LocationPolicy's container. +; Default: 10 +mni=10 +; Default for maxByteSize for the LocationPolicy's container. +; Default: 10.000 bytes +mbs=10000 + + +; +; Resource defaults: Request +; + +[resource.req] +; A resource's expiration time in seconds. Must be >0. Default: 60 +expirationTime=60 + + +; +; Resource defaults: Subscription +; + +[resource.sub] +; Default for batchNotify/duration in seconds. Must be >0. Default: 60 +batchNotifyDuration=60 + + ; ; Resource defaults: TimeSeries ; @@ -435,24 +550,6 @@ bcni=PT1H bcnt=10.0 -; -; Resource defaults: Request -; - -[resource.req] -; A resource's expiration time in seconds. Must be >0. Default: 60 -expirationTime=60 - - -; -; Resource defaults: Subscription -; - -[resource.sub] -; Default for batchNotify/duration in seconds. Must be >0. Default: 60 -batchNotifyDuration=60 - - ; ; Web UI settings ; @@ -521,5 +618,9 @@ verbose=False ; 0 means disable monitoring. ; Default: 2.0 seconds fileMonitoringInterval=2.0 +; Set the timeout for script execution in seconds. +; 0.0 means no timeout. +; Default: 60.0 seconds +maxRuntime=60.0 diff --git a/acme/__init__.py b/acme/__init__.py index e69de29b..408c1594 100644 --- a/acme/__init__.py +++ b/acme/__init__.py @@ -0,0 +1,4 @@ +""" This module contains the ACME CSE implementation. It is the main module of the ACME CSE. + It contains the main() function that is called when the CSE is started. + It also contains the CSE class that implements the CSE. +""" \ No newline at end of file diff --git a/acme/__main__.py b/acme/__main__.py index d4fbc692..b7fcceb7 100644 --- a/acme/__main__.py +++ b/acme/__main__.py @@ -7,6 +7,9 @@ # Starter for the ACME CSE # +""" This module contains the ACME CSE implementation. It is the main module of the ACME CSE. 
+""" + import os, re, sys if sys.version_info < (3, 8): print('Python version >= 3.8 is required') @@ -24,23 +27,43 @@ if 'ACME_DEBUG' in os.environ: raise e - # Give hint to run ACME as a module - if 'attempted relative import' in e.msg: - print(f'\nPlease run acme as a package:\n\n\t{sys.executable} -m {sys.argv[0]} [arguments]\n') + match e.msg: + # Give hint to run ACME as a module + case x if 'attempted relative import' in x: + print(f'\nPlease run acme as a package:\n\n\t{sys.executable} -m {sys.argv[0]} [arguments]\n') - # Give hint how to do the installation - elif 'No module named' in e.msg: - m = re.search("'(.+?)'", e.msg) - package = f' ({m.group(1)}) ' if m else ' ' - print(f'\nOne or more required packages or modules{package}could not be found.\nPlease install the missing packages, e.g. by running the following command:\n\n\t{sys.executable} -m pip install -r requirements.txt\n') - else: - print(f'\nError during import: {e.msg}\n') + # Give hint how to do the installation + case x if 'No module named' in x: + m = re.search("'(.+?)'", e.msg) + package = f' ({m.group(1)}) ' if m else ' ' + print(f'\nOne or more required packages or modules{package}could not be found.\nPlease install the missing packages, e.g. by running the following command:\n\n\t{sys.executable} -m pip install -r requirements.txt\n') + + # Ask if the user wants to install the missing packages + try: + if input('\nDo you want to install the missing packages now? [y/N] ') in ['y', 'Y']: + import os + os.system(f'{sys.executable} -m pip install -r requirements.txt') + + # Ask if the user wants to start ACME + if input('\nDo you want to start ACME now? [Y/n] ') in ['y', 'Y', '']: + os.system(f'{sys.executable} -m acme {" ".join(sys.argv[1:])}') + + except Exception as e2: + print(f'\nError during installation: {e2}\n') + + case _: + print(f'\nError during import: {e.msg}\n') quit(1) # Handle command line arguments def parseArgs() -> argparse.Namespace: + """ Parse the command line arguments. + + Returns: + The parsed arguments. + """ parser = argparse.ArgumentParser(prog='acme') parser.add_argument('--config', action='store', dest='configfile', default=C.defaultUserConfigFile, metavar='', help='specify the configuration file') @@ -48,6 +71,7 @@ def parseArgs() -> argparse.Namespace: groupEnableHttp = parser.add_mutually_exclusive_group() groupEnableHttp.add_argument('--http', action='store_false', dest='http', default=None, help='run CSE with http server') groupEnableHttp.add_argument('--https', action='store_true', dest='https', default=None, help='run CSE with https server') + groupEnableHttp.add_argument('--http-wsgi', action='store_true', dest='httpWsgi', default=None, help='run CSE with http WSGI support') groupEnableMqtt = parser.add_mutually_exclusive_group() groupEnableMqtt.add_argument('--mqtt', action='store_true', dest='mqttenabled', default=None, help='enable mqtt binding') @@ -76,6 +100,8 @@ def parseArgs() -> argparse.Namespace: def main() -> None: + """ Main function of the ACME CSE. + """ # Start the CSE with command line arguments. # In case the CSE should be started without command line parsing, the values # can be passed instead. Unknown arguments are ignored. diff --git a/acme/etc/Constants.py b/acme/etc/Constants.py index 4f8072e1..dcddf88f 100644 --- a/acme/etc/Constants.py +++ b/acme/etc/Constants.py @@ -118,6 +118,9 @@ class Constants(object): attrRiTyMapping = '__riTyMapping__' """ Constant: Name of the 'Resource internal *__riTyMapping__* attribute. 
This attribute holds the mapping of resourceID's to resource types. """ + attrLocCoordinage = '__locCoordinate__' + """ Constant: Name of the 'Resource internal *__locCoordinate__* attribute. This attribute holds the location coordinate of a resource. """ + # # Supported URL schemes @@ -134,4 +137,8 @@ class Constants(object): """ Maximum length of identifiers generated by the CSE """ - + # + # Network Coordination supported + # + networkCoordinationSupported = False + """ Network coordination supported by the CSE """ diff --git a/acme/etc/DateUtils.py b/acme/etc/DateUtils.py index 4699d9fe..38dde4a2 100644 --- a/acme/etc/DateUtils.py +++ b/acme/etc/DateUtils.py @@ -46,6 +46,17 @@ def toISO8601Date(ts:Union[float, datetime], readable:Optional[bool] = False) -> return ts.strftime('%Y-%m-%dT%H:%M:%S,%f' if readable else '%Y%m%dT%H%M%S,%f') +def fromISO8601Date(date:str) -> datetime: + """ Convert an ISO 8601 date time string to a *datetime* object. + + Args: + date: ISO 8601 datetime string. + Return: + Datetime object. + """ + return isodate.parse_datetime(date) + + def fromAbsRelTimestamp(absRelTimestamp:str, default:Optional[float] = 0.0, withMicroseconds:Optional[bool] = True) -> float: @@ -77,11 +88,12 @@ def fromAbsRelTimestamp(absRelTimestamp:str, return default -def fromDuration(duration:str) -> float: +def fromDuration(duration:str, allowMS:bool = True) -> float: """ Convert a duration to a number of seconds (float). Args: duration: String with either an ISO 8601 period or a string with a number of ms. + allowMS: If True, the function tries to convert the string as if it contains a number of ms. Return: Float, number of seconds. Raise: @@ -93,7 +105,9 @@ def fromDuration(duration:str) -> float: try: # Last try: absRelTimestamp could be a relative offset in ms. Try to convert # the string and return an absolute UTC-based duration - return float(duration) / 1000.0 + if allowMS: + return float(duration) / 1000.0 + raise except Exception as e: #if L.isWarn: L.logWarn(f'Wrong format for duration: {duration}') raise @@ -125,13 +139,22 @@ def rfc1123Date(timeval:Optional[float] = None) -> str: return formatdate(timeval = timeval, localtime = False, usegmt = True) +def utcDatetime() -> datetime: + """ Return the current datetime, but relative to UTC. + + Returns: + Datetime with current UTC-based time. + """ + return datetime.now(tz = timezone.utc) + + def utcTime() -> float: """ Return the current time's timestamp, but relative to UTC. Returns: Float with current UTC-based POSIX time. """ - return datetime.now(tz = timezone.utc).timestamp() + return utcDatetime().timestamp() def timeUntilTimestamp(ts:float) -> float: @@ -222,14 +245,13 @@ def waitFor(timeout:float, # Cron # -def cronMatchesTimestamp(cronPattern:Union[str, - list[str]], +def cronMatchesTimestamp(cronPattern:Union[str, list[str]], ts:Optional[datetime] = None) -> bool: ''' A cron parser to determine if the *cronPattern* matches for a given timestamp *ts*. 
- The cronPattern must follow the usual crontab pattern of 5 fields: + The cronPattern must follow the usual crontab pattern of 7 fields: - minute hour dayOfMonth month dayOfWeek + second minute hour dayOfMonth month dayOfWeek year which each must comply to the following patterns: @@ -324,18 +346,20 @@ def _parseMatchCronArg(element:str, target:int) -> bool: return False if ts is None: - ts = datetime.now(tz = timezone.utc) + ts = utcDatetime() cronElements = cronPattern.split() if isinstance(cronPattern, str) else cronPattern - if len(cronElements) != 5: - raise ValueError(f'Invalid or empty cron pattern: "{cronPattern}". Must have 5 elements.') + if len(cronElements) != 7: + raise ValueError(f'Invalid or empty cron pattern: "{cronPattern}". Must have 7 elements.') weekday = ts.isoweekday() - return _parseMatchCronArg(cronElements[0], ts.minute) \ - and _parseMatchCronArg(cronElements[1], ts.hour) \ - and _parseMatchCronArg(cronElements[2], ts.day) \ - and _parseMatchCronArg(cronElements[3], ts.month) \ - and _parseMatchCronArg(cronElements[4], 0 if weekday == 7 else weekday) + return _parseMatchCronArg(cronElements[0], ts.second) \ + and _parseMatchCronArg(cronElements[1], ts.minute) \ + and _parseMatchCronArg(cronElements[2], ts.hour) \ + and _parseMatchCronArg(cronElements[3], ts.day) \ + and _parseMatchCronArg(cronElements[4], ts.month) \ + and _parseMatchCronArg(cronElements[5], 0 if weekday == 7 else weekday) \ + and _parseMatchCronArg(cronElements[6], ts.year) def cronInPeriod(cronPattern:Union[str, @@ -367,7 +391,7 @@ def cronInPeriod(cronPattern:Union[str, # Fill in the default if endTs is None: - endTs = datetime.now(tz = timezone.utc) + endTs = utcDatetime() # Check the validity of the range if endTs < startTs: diff --git a/acme/etc/GeoTools.py b/acme/etc/GeoTools.py new file mode 100644 index 00000000..16577565 --- /dev/null +++ b/acme/etc/GeoTools.py @@ -0,0 +1,160 @@ +# +# GeoUtils.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. See the LICENSE file for further details. +# +# Various helpers for working with geo-coordinates, shapely, and geoJSON +# + +""" Utility functions for geo-coordinates and geoJSON +""" + +from typing import Union, Optional, cast +import json + +from shapely import Point, Polygon, LineString, MultiPoint, MultiLineString, MultiPolygon +from shapely.geometry.base import BaseGeometry + +from ..etc.Types import GeometryType + + +def getGeoPoint(jsn:Optional[Union[dict, str]]) -> Optional[tuple[float, float]]: + """ Get the geo-point from a geoJSON object. + + Args: + jsn: The geoJSON object as a dictionary or a string. + + Returns: + A tuple of the geo-point (latitude, longitude). None if not found or invalid JSON. + """ + if jsn is None: + return None + if isinstance(jsn, str): + try: + jsn = json.loads(jsn) + except ValueError: + return None + if cast(dict, jsn).get('type') != 'Point': + return None + if coordinates := cast(dict, jsn).get('coordinates'): + return coordinates[0], coordinates[1] + return None + + +def getGeoPolygon(jsn:Optional[Union[dict, str]]) -> Optional[list[tuple[float, float]]]: + """ Get the geo-polygon from a geoJSON object. + + Args: + jsn: The geoJSON object as a dictionary or a string. + + Returns: + A list of tuples of the geo-polygon (latitude, longitude). None if not found or invalid JSON. 
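A short usage sketch for the extended 7-field cron pattern handled by cronMatchesTimestamp() above (second minute hour dayOfMonth month dayOfWeek year). The concrete pattern and timestamp are made up, and only literal field values and '*' are used:

from datetime import datetime, timezone
from acme.etc.DateUtils import cronMatchesTimestamp

pattern = '0 30 9 * * * *'  # every day at 09:30:00
ts = datetime(2023, 5, 3, 9, 30, 0, tzinfo = timezone.utc)
print(cronMatchesTimestamp(pattern, ts))  # True
print(cronMatchesTimestamp(pattern))      # evaluated against the current UTC time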
+ """ + if jsn is None: + return None + if isinstance(jsn, str): + try: + jsn = json.loads(jsn) + except ValueError: + return None + if cast(dict, jsn).get('type') != 'Polygon': + return None + if coordinates := cast(dict, jsn).get('coordinates'): + return coordinates[0] + return None + + +def isLocationInsidePolygon(polygon:list[tuple[float, float]], location:tuple[float, float]) -> bool: + """ Check if a location is inside a polygon. + + Args: + polygon: The polygon as a list of tuples (latitude, longitude). + location: The location as a tuple (latitude, longitude). + + Returns: + True if the location is inside the polygon, False otherwise. + """ + return Polygon(polygon).contains(Point(location)) + + +def geoWithin(aType:GeometryType, aShape:tuple|list, bType:GeometryType, bShape:tuple|list) -> bool: + """ Check if a shape is within another shape. + + Args: + aType: The type of the first shape. + aShape: The shape of the first shape. + bType: The type of the second shape. + bShape: The shape of the second shape. + + Returns: + True if the first shape is (fully) within the second shape, False otherwise. + """ + return getGeoShape(aType, aShape).within(getGeoShape(bType, bShape)) + + +def geoContains(aType:GeometryType, aShape:tuple|list, bType:GeometryType, bShape:tuple|list) -> bool: + """ Check if a shape contains another shape. + + Args: + aType: The type of the first shape. + aShape: The shape of the first shape. + bType: The type of the second shape. + bShape: The shape of the second shape. + + Returns: + True if the first shape (fully) contains the second shape, False otherwise. + """ + return getGeoShape(aType, aShape).contains(getGeoShape(bType, bShape)) + + +def geoIntersects(aType:GeometryType, aShape:tuple|list, bType:GeometryType, bShape:tuple|list) -> bool: + """ Check if a shape intersects another shape. + + Args: + aType: The type of the first shape. + aShape: The shape of the first shape. + bType: The type of the second shape. + bShape: The shape of the second shape. + + Returns: + True if the first shape intersects the second shape, False otherwise. + """ + return getGeoShape(aType, aShape).intersects(getGeoShape(bType, bShape)) + + +def getGeoShape(typ:GeometryType, shape:tuple|list) -> BaseGeometry: + """ Get a shapely geometry object from a geoJSON shape. + + Args: + typ: The geometry type. + shape: The geoJSON shape as a tuple or list. + + Returns: + A shapely geometry object. + """ + try: + match typ: + case GeometryType.Point: + return Point(shape) + case GeometryType.LineString: + return LineString(shape) + case GeometryType.Polygon: + return Polygon(shape) + case GeometryType.MultiPoint: + return MultiPoint(shape) + case GeometryType.MultiLineString: + return MultiLineString(shape) + case GeometryType.MultiPolygon: + # Convert to list to polygons. This is necessary because shapely does not support + # passing a list of polygons to the MultiPolygon constructor. Those polygons must + # contain "hole" definitions. So we need to create Polygons first and then + # pass them to the MultiPolygon constructor. 
+ ps:list[Polygon] = [] + for s in shape: + if not isinstance(s, list): + raise ValueError(f'Invalid geometry shape: {shape}') + ps.append(Polygon(s)) + return MultiPolygon(ps) + except TypeError as e: + raise ValueError(f'Invalid geometry shape: {shape} ({e})') diff --git a/acme/etc/RequestUtils.py b/acme/etc/RequestUtils.py index f654d336..7a49fa69 100644 --- a/acme/etc/RequestUtils.py +++ b/acme/etc/RequestUtils.py @@ -50,11 +50,13 @@ def deserializeData(data:bytes, ct:ContentSerializationType) -> Optional[JSON]: """ if len(data) == 0: return {} - if ct == ContentSerializationType.JSON: - return cast(JSON, json.loads(TextTools.removeCommentsFromJSON(data.decode('utf-8')))) - elif ct == ContentSerializationType.CBOR: - return cast(JSON, cbor2.loads(data)) - return None + match ct: + case ContentSerializationType.JSON: + return cast(JSON, json.loads(TextTools.removeCommentsFromJSON(data.decode('utf-8')))) + case ContentSerializationType.CBOR: + return cast(JSON, cbor2.loads(data)) + case _: + return None def toHttpUrl(url:str) -> str: @@ -67,12 +69,14 @@ def toHttpUrl(url:str) -> str: A valid URL with escaped special characters. """ u = list(urlparse(url)) - if u[2].startswith('///'): - u[2] = f'/_{u[2][2:]}' - url = urlunparse(u) - elif u[2].startswith('//'): - u[2] = f'/~{u[2][1:]}' - url = urlunparse(u) + match u[2]: + case x if x.startswith('///'): + u[2] = f'/_{u[2][2:]}' + url = urlunparse(u) + case x if x.startswith('//'): + u[2] = f'/~{u[2][1:]}' + url = urlunparse(u) + return url @@ -218,6 +222,10 @@ def requestFromResult(inResult:Result, # Result Content if inResult.request.drt: req['drt'] = int(inResult.request.drt) + + # Result Expiration Timestamp + if inResult.request.rset: + req['rset'] = inResult.request.rset diff --git a/acme/etc/ResponseStatusCodes.py b/acme/etc/ResponseStatusCodes.py index efa6ac25..04df5108 100644 --- a/acme/etc/ResponseStatusCodes.py +++ b/acme/etc/ResponseStatusCodes.py @@ -100,8 +100,8 @@ class ResponseStatusCode(ACMEIntEnum): INSUFFICIENT_ARGUMENTS = 6024 """ INSUFFICIENT_ARGUMENTS """ - UNKNOWN = -1 + """ UNKNOWN """ def httpStatusCode(self) -> int: @@ -158,6 +158,7 @@ def httpStatusCode(self) -> int: ResponseStatusCode.UNKNOWN : HTTPStatus.NOT_IMPLEMENTED, # NOT IMPLEMENTED } +""" Mapping of oneM2M return codes to http status codes. """ _successRSC = ( ResponseStatusCode.ACCEPTED, @@ -168,34 +169,53 @@ def httpStatusCode(self) -> int: ResponseStatusCode.DELETED, ResponseStatusCode.UPDATED, ) +""" The list of success response status codes. """ def isSuccessRSC(rsc:ResponseStatusCode) -> bool: + """ Check whether a response status code is a success code. + + Args: + rsc: The response status code to check. + + Returns: + True if the response status code is a success code, False otherwise. +""" return rsc in _successRSC class ResponseException(Exception): - """ Base class for CSE Exceptions. - - Attributes: - rsc: The response status code. - dbg: An optional debug message. - error: This is an error-related exception. - """ + """ Base class for CSE Exceptions.""" def __init__(self, rsc:ResponseStatusCode, dbg:Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + rsc: The response status code. + dbg: An optional debug message. + data: Optional data. + """ super().__init__() self.rsc = rsc + """ The response status code. """ self.dbg = dbg + """ An optional debug message. """ self.data = data + """ Optional data. """ class ALREADY_EXISTS(ResponseException): """ ALREADY EXISTS Response Status Code. 
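A usage sketch for the new GeoTools helpers above; the import paths follow this diff, and the point and polygon coordinates are made up for illustration:

from acme.etc.GeoTools import getGeoPoint, geoWithin
from acme.etc.Types import GeometryType

# Parse a geoJSON Point and test whether it lies within a square Polygon.
point = getGeoPoint({'type': 'Point', 'coordinates': [5.0, 5.0]})  # -> (5.0, 5.0)
square = [(0.0, 0.0), (10.0, 0.0), (10.0, 10.0), (0.0, 10.0), (0.0, 0.0)]
print(geoWithin(GeometryType.Point, point, GeometryType.Polygon, square))  # True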
""" def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.ALREADY_EXISTS, dbg, data) @@ -203,6 +223,12 @@ class APP_RULE_VALIDATION_FAILED(ResponseException): """ APP RULE VALIDATION FAILED Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.APP_RULE_VALIDATION_FAILED, dbg, data) @@ -210,6 +236,12 @@ class BAD_REQUEST(ResponseException): """ BAD REQUEST Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.BAD_REQUEST, dbg, data) @@ -217,6 +249,12 @@ class CONFLICT(ResponseException): """ CONFLICT Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.CONFLICT, dbg, data) @@ -224,6 +262,12 @@ class CONTENTS_UNACCEPTABLE(ResponseException): """ CONTENTS UNACCEPTABLE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.CONTENTS_UNACCEPTABLE, dbg, data) @@ -231,6 +275,12 @@ class CROSS_RESOURCE_OPERATION_FAILURE(ResponseException): """ CROSS RESOURCE OPERATION FAILURE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.CROSS_RESOURCE_OPERATION_FAILURE, dbg, data) @@ -238,6 +288,12 @@ class GROUP_MEMBER_TYPE_INCONSISTENT(ResponseException): """ GROUP MEMBER TYPE INCONSISTENT Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.GROUP_MEMBER_TYPE_INCONSISTENT, dbg, data) @@ -245,6 +301,12 @@ class INSUFFICIENT_ARGUMENTS(ResponseException): """ INSUFFICIENT ARGUMENTS Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.INSUFFICIENT_ARGUMENTS, dbg, data) @@ -252,6 +314,12 @@ class INTERNAL_SERVER_ERROR(ResponseException): """ INTERNAL SERVER ERRROR Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.INTERNAL_SERVER_ERROR, dbg, data) @@ -259,6 +327,12 @@ class INVALID_CHILD_RESOURCE_TYPE(ResponseException): """ INVALID CHILD RESOURCE TYPE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. 
+ """ super().__init__(ResponseStatusCode.INVALID_CHILD_RESOURCE_TYPE, dbg, data) @@ -266,6 +340,12 @@ class INVALID_ARGUMENTS(ResponseException): """ INVALID ARGUMENTS Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.INVALID_ARGUMENTS, dbg, data) @@ -273,6 +353,12 @@ class INVALID_SPARQL_QUERY(ResponseException): """ INVALID SPARQL QUERY Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.INVALID_SPARQL_QUERY, dbg, data) @@ -287,6 +373,12 @@ class NOT_ACCEPTABLE(ResponseException): """ NOT ACCEPTABLE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.NOT_ACCEPTABLE, dbg, data) @@ -294,6 +386,12 @@ class NOT_FOUND(ResponseException): """ NOT FOUND Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.NOT_FOUND, dbg, data) @@ -301,6 +399,12 @@ class NOT_IMPLEMENTED(ResponseException): """ NOT IMPLEMENTED Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.NOT_IMPLEMENTED, dbg, data) @@ -308,6 +412,12 @@ class OPERATION_DENIED_BY_REMOTE_ENTITY(ResponseException): """ OPERATION DENIED BY REMOTE ENTITY Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.OPERATION_DENIED_BY_REMOTE_ENTITY, dbg, data) @@ -315,6 +425,12 @@ class OPERATION_NOT_ALLOWED(ResponseException): """ OPERATION NOT ALLOWED Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.OPERATION_NOT_ALLOWED, dbg, data) @@ -322,6 +438,12 @@ class ORIGINATOR_HAS_ALREADY_REGISTERED(ResponseException): """ ORIGINATOR HAS ALREADY REGISTERED Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.ORIGINATOR_HAS_ALREADY_REGISTERED, dbg, data) @@ -329,6 +451,12 @@ class ORIGINATOR_HAS_NO_PRIVILEGE(ResponseException): """ ORIGINATOR HAS NO PRIVILEGE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.ORIGINATOR_HAS_NO_PRIVILEGE, dbg, data) @@ -336,6 +464,12 @@ class RECEIVER_HAS_NO_PRIVILEGES(ResponseException): """ RECEIVER HAS NO PRIVILEGES Response Status Code. 
""" def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.RECEIVER_HAS_NO_PRIVILEGES, dbg, data) @@ -343,6 +477,12 @@ class RELEASE_VERSION_NOT_SUPPORTED(ResponseException): """ RELEASE VERSION NOT SUPPORTED Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.RELEASE_VERSION_NOT_SUPPORTED, dbg, data) @@ -350,6 +490,12 @@ class REMOTE_ENTITY_NOT_REACHABLE(ResponseException): """ REMOTE ENTITY NOT REACHABLE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.REMOTE_ENTITY_NOT_REACHABLE, dbg, data) @@ -357,6 +503,12 @@ class REQUEST_TIMEOUT(ResponseException): """ REQUEST TIMEOUT Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.REQUEST_TIMEOUT, dbg, data) @@ -364,6 +516,12 @@ class SECURITY_ASSOCIATION_REQUIRED(ResponseException): """ SECURITY ASSOCIATION REQUIRED Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.SECURITY_ASSOCIATION_REQUIRED, dbg, data) @@ -371,6 +529,12 @@ class SERVICE_SUBSCRIPTION_NOT_ESTABLISHED(ResponseException): """ SERVICE SUBSCRIPTION NOT ESTABLISHED Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.SERVICE_SUBSCRIPTION_NOT_ESTABLISHED, dbg, data) @@ -378,6 +542,12 @@ class SUBSCRIPTION_CREATER_HAS_NO_PRIVILEGE(ResponseException): """ SUBSCRIPTION CREATER HAS NO PRIVILEGE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.SUBSCRIPTION_CREATER_HAS_NO_PRIVILEGE, dbg, data) @@ -385,6 +555,12 @@ class SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE(ResponseException): """ SUBSCRIPTION HOST HAS NO PRIVILEGE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE, dbg, data) @@ -392,6 +568,12 @@ class SUBSCRIPTION_VERIFICATION_INITIATION_FAILED(ResponseException): """ SUBSCRIPTION VERIFICATION INITIATION FAILED Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.SUBSCRIPTION_VERIFICATION_INITIATION_FAILED, dbg, data) @@ -399,6 +581,12 @@ class TARGET_NOT_REACHABLE(ResponseException): """ TARGET NOT REACHABLE Response Status Code. 
""" def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.TARGET_NOT_REACHABLE, dbg, data) @@ -406,6 +594,12 @@ class TARGET_NOT_SUBSCRIBABLE(ResponseException): """ TARGET NOT SUBSCRIBABLE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.TARGET_NOT_SUBSCRIBABLE, dbg, data) @@ -413,6 +607,12 @@ class UNSUPPORTED_MEDIA_TYPE(ResponseException): """ UNSUPPORTED MEDIA TYPE Response Status Code. """ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None: + """ Constructor. + + Args: + dbg: An optional debug message. + data: Optional data. + """ super().__init__(ResponseStatusCode.UNSUPPORTED_MEDIA_TYPE, dbg, data) @@ -454,9 +654,13 @@ def __init__(self, dbg: Optional[str] = None, data:Optional[Any] = None) -> None def exceptionFromRSC(rsc:ResponseStatusCode) -> Optional[Type[ResponseException]]: - return _mapping.get(rsc) - - - + """ Get the exception class for a Response Status Code. + + Args: + rsc: The Response Status Code. + Returns: + The exception class or None if not found. + """ + return _mapping.get(rsc) diff --git a/acme/etc/Types.py b/acme/etc/Types.py index 07eef7b5..f4598e3a 100644 --- a/acme/etc/Types.py +++ b/acme/etc/Types.py @@ -56,6 +56,8 @@ class ResourceTypes(ACMEIntEnum): """ CSEBase resource type. """ GRP = 9 """ Group resouce type. """ + LCP = 10 + """ LocationPolicy resource type. """ MGMTOBJ = 13 """ ManagementObject resource type. """ NOD = 14 @@ -66,6 +68,8 @@ class ResourceTypes(ACMEIntEnum): """ Remote CSE resource type. """ REQ = 17 """ Request resource type. """ + SCH = 18 + """ Schedule resource type. """ SUB = 23 """ Subscription resource type. """ SMD = 24 @@ -85,7 +89,7 @@ class ResourceTypes(ACMEIntEnum): ACTR = 65 """ Action resource type. """ DEPR = 66 - + """ Dependency resource type. """ # Virtual resources (some are proprietary resource types) @@ -152,12 +156,16 @@ class ResourceTypes(ACMEIntEnum): """ Announced CSEBase resource type. """ GRPAnnc = 10009 """ Announced Group resouce type. """ + LCPAnnc = 10010 + """ Announced LocationPolicy resource type. """ MGMTOBJAnnc = 10013 """ Announced ManagementObject resource type. """ NODAnnc = 10014 """ Announced Node resource type. """ CSRAnnc = 10016 """ Announced Remote CSE resource type. """ + SCHAnnc = 10018 + """ Announced Schedule resource type. """ SMDAnnc = 10024 """ Announced SemanticDescriptor resouce type. """ FCNTAnnc = 10028 @@ -171,7 +179,6 @@ class ResourceTypes(ACMEIntEnum): ACTRAnnc = 10065 """ Announced Action resource type. """ DEPRAnnc = 10066 - """ Announced Dependency resource type. """ FWRAnnc = -30001 """ Announced Firmware ManagementObject specialization. """ @@ -202,10 +209,22 @@ class ResourceTypes(ACMEIntEnum): def tpe(self) -> str: + """ Get the resource type name. + + Return: + The resource type name. + """ return _ResourceTypesNames.get(self) def announced(self, mgd:Optional[ResourceTypes] = None) -> ResourceTypes: + """ Get the announced resource type for a resource type. + + Args: + mgd: The mgmtObj specialization type. Only used for mgmtObjs. + Return: + The announced resource type, or UNKNOWN. 
+ """ if self != ResourceTypes.MGMTOBJ: # Handling for non-mgmtObjs @@ -379,18 +398,32 @@ def fullname(cls, ty:int) -> str: @dataclass() class ResourceDescription(): + """ Describes a resource type. + """ typeName:str = None + """ The resource type name. """ announcedType:ResourceTypes = None + """ The announced resource type. """ isAnnouncedResource:bool= False + """ Whether the resource type is an announced resource type. """ isMgmtSpecialization:bool = False + """ Whether the resource type is a mgmtObj specialization. """ isInstanceResource:bool = False + """ Whether the resource type is an instance resource. """ isInternalType:bool = False + """ Whether the resource type is an internal type. """ virtualResourceName:str = None # If this is set then the resource is a virtual resouce + """ The name of a virtual resource. """ clazz:Resource = None # type:ignore [name-defined] + """ The resource class. """ factory:FactoryCallableT = None + """ The resource factory callable to create this resource. """ isRequestCreatable:bool = True # Can be created by a request + """ Whether the resource type can be created by a request. """ isNotificationEntity:bool = False # Is a direct notification target + """ Whether the resource type is a direct notification target. """ fullName:str = '' # Full name of the resource type + """ The full name of the resource type. """ _ResourceTypeDetails = { @@ -422,6 +455,8 @@ class ResourceDescription(): ResourceTypes.GRP : ResourceDescription(typeName = 'm2m:grp', announcedType = ResourceTypes.GRPAnnc, fullName='Group'), ResourceTypes.GRPAnnc : ResourceDescription(typeName = 'm2m:grpA', isAnnouncedResource = True, fullName='Group Announced'), ResourceTypes.GRP_FOPT : ResourceDescription(typeName = 'm2m:fopt', virtualResourceName = 'fopt', fullName='Fanout Point'), # not an official type name + ResourceTypes.LCP : ResourceDescription(typeName = 'm2m:lcp', announcedType = ResourceTypes.LCPAnnc, fullName='LocationPolicy'), + ResourceTypes.LCPAnnc : ResourceDescription(typeName = 'm2m:lcpA', isAnnouncedResource = True, fullName='LocationPolicy Announced'), ResourceTypes.MGMTOBJ : ResourceDescription(typeName = 'm2m:mgo', announcedType = ResourceTypes.MGMTOBJAnnc, fullName = 'ManagementObject'), # not an official type name ResourceTypes.MGMTOBJAnnc : ResourceDescription(typeName = 'm2m:mgoA', isAnnouncedResource = True, fullName = 'ManagementObject Announced'), # not an official type name ResourceTypes.NOD : ResourceDescription(typeName = 'm2m:nod', announcedType = ResourceTypes.NODAnnc, fullName='Node'), @@ -429,6 +464,8 @@ class ResourceDescription(): ResourceTypes.PCH : ResourceDescription(typeName = 'm2m:pch', fullName='PollingChannel'), ResourceTypes.PCH_PCU : ResourceDescription(typeName = 'm2m:pcu', virtualResourceName = 'pcu', fullName='PollingChannel URI'), ResourceTypes.REQ : ResourceDescription(typeName = 'm2m:req', isRequestCreatable = False, fullName='Request'), + ResourceTypes.SCH : ResourceDescription(typeName = 'm2m:sch', announcedType = ResourceTypes.SCHAnnc, fullName='Schedule'), + ResourceTypes.SCHAnnc : ResourceDescription(typeName = 'm2m:schA', isAnnouncedResource = True, fullName='Schedule Announced'), ResourceTypes.SMD : ResourceDescription(typeName = 'm2m:smd', announcedType = ResourceTypes.SMDAnnc, fullName='SemanticDescriptor'), ResourceTypes.SMDAnnc : ResourceDescription(typeName = 'm2m:smdA', isAnnouncedResource = True, fullName='SemanticDescriptor Announced'), ResourceTypes.SUB : ResourceDescription(typeName = 'm2m:sub', 
fullName='Subscription'), @@ -477,6 +514,7 @@ class ResourceDescription(): ResourceTypes.COMPLEX : ResourceDescription(typeName = 'complex', isInternalType = True), } +""" Mapping between resource types and their description. """ def addResourceFactoryCallback(ty:ResourceTypes, clazz:Resource, factory:FactoryCallableT) -> None: # type:ignore [name-defined] @@ -524,7 +562,7 @@ def addResourceFactoryCallback(ty:ResourceTypes, clazz:Resource, factory:Factory _ResourceTypesAnnouncedResourceTypes.sort() -_ResourceTypesSupportedResourceTypes = [ t +_ResourceTypesSupportedResourceTypes:list[ResourceTypes] = [ t for t, d in _ResourceTypeDetails.items() if not d.isMgmtSpecialization and not d.virtualResourceName and not d.isInternalType and t != ResourceTypes.CSEBaseAnnc] """ Sorted list of supported resource types (without MgmtObj spezializations and virtual resources). """ @@ -583,31 +621,62 @@ class BasicType(ACMEIntEnum): """ Basic resource types. """ - positiveInteger = auto() - nonNegInteger = auto() - unsignedInt = auto() - unsignedLong = auto() - string = auto() - timestamp = auto() - absRelTimestamp = auto() - list = auto() - listNE = auto() # Not empty list - dict = auto() - anyURI = auto() - boolean = auto() - float = auto() - geoCoordinates = auto() - integer = auto() - void = auto() - duration = auto() - any = auto() - complex = auto() - enum = auto() - adict = auto() # anoymous dict structure - base64 = auto() - schedule = auto() # scheduleEntry - time = timestamp # alias type for time - date = timestamp # alias type for date + positiveInteger = auto() + """ Positive integer. """ + nonNegInteger = auto() + """ Non-negative integer. """ + unsignedInt = auto() + """ Unsigned integer. """ + unsignedLong = auto() + """ Unsigned long. """ + string = auto() + """ String. """ + timestamp = auto() + """ Timestamp. """ + absRelTimestamp = auto() + """ Absolute or relative timestamp. """ + list = auto() + """ List. """ + listNE = auto() # Not empty list + """ Not empty list. """ + dict = auto() + """ Dictionary or sub-structure. """ + anyURI = auto() + """ Any URI. """ + boolean = auto() + """ Boolean. """ + float = auto() + """ Float. """ + geoJsonCoordinate = auto() + """ GeoJSON coordinate. """ + integer = auto() + """ Integer. """ + void = auto() + """ Void. """ + duration = auto() + """ Duration. """ + any = auto() + """ Any type. """ + complex = auto() + """ Complex type. """ + enum = auto() + """ Enumeration. """ + adict = auto() # anoymous dict structure + """ Anonymous dictionary. """ + base64 = auto() + """ Base64 encoded data. """ + schedule = auto() # scheduleEntry + """ Schedule entry. """ + ID = auto() # m2m:ID + """ oneM2M ID. """ + ncname = auto() # xs:NCName + """ XML NCName. """ + + # aliases. Always put at the end! Seems cause confusion with python < 3.11 + time = timestamp # alias type for time + """ Alias for timestamp. """ + date = timestamp # alias type for date + """ Alias for timestamp. """ @classmethod def to(cls, name:str|Tuple[str], insensitive:Optional[bool] = True) -> BasicType: @@ -624,21 +693,19 @@ def to(cls, name:str|Tuple[str], insensitive:Optional[bool] = True) -> BasicType class Cardinality(ACMEIntEnum): """ Resource attribute cardinalities. - - Attributes: - CAR1: Mandatory. - CAR1L: Mandatory list. - CAR1LN: Mandatory list that shall not be empty. - CAR01: Optional. - CAR01L: Optional list. - CAR1N: Mandatory but may be Null/None. """ CAR1 = auto() + """ Mandatory. """ CAR1L = auto() + """ Mandatory list. 
""" CAR1LN = auto() + """ Mandatory list that shall not be empty. """ CAR01 = auto() + """ Optional. """ CAR01L = auto() + """ Optional list. """ CAR1N = auto() + """ Mandatory but may be Null/None. """ @classmethod def hasCar(cls, name:str) -> bool: @@ -691,24 +758,17 @@ def _prepare(name:str) -> str: class RequestOptionality(ACMEIntEnum): """ Request optionality enum values. - - Attributes: - NP: Not provided. - O: Optional. - M: Mandatory. """ NP = auto() + """ Not provided. """ O = auto() + """ Optional. """ M = auto() + """ Mandatory. """ class Announced(ACMEIntEnum): """ Anouncement attribute enum values. - - Attributes: - NA: Not announced. - OA: Optionally announced. - MA: Mandatory announced. """ NA = auto() @@ -745,41 +805,47 @@ class EvalCriteriaOperator(ACMEIntEnum): """ Less than or equal. """ def isAllowedType(self, typ:BasicType) -> bool: - # Ordered types are allowed for all operators - if typ in [ BasicType.positiveInteger, - BasicType.nonNegInteger, - BasicType.unsignedInt, - BasicType.unsignedLong, - BasicType.timestamp, - BasicType.absRelTimestamp, - BasicType.float, - BasicType.integer, - BasicType.duration, - BasicType.enum, - BasicType.time, - BasicType.date ]: - return True - # Equal and unequal are the only operators allowed for all other types - if self.value in [ EvalCriteriaOperator.equal, - EvalCriteriaOperator.notEqual ]: - return True - # Not allowed - return False + """ Check if the given BasicType is allowed for the current EvalCriteriaOperator. + + Args: + typ: The BasicType to check. + + Returns: + True if the BasicType is allowed for the current EvalCriteriaOperator, False otherwise. + """ + # Ordered types are allowed for all operators + if typ in [ BasicType.positiveInteger, + BasicType.nonNegInteger, + BasicType.unsignedInt, + BasicType.unsignedLong, + BasicType.timestamp, + BasicType.absRelTimestamp, + BasicType.float, + BasicType.integer, + BasicType.duration, + BasicType.enum, + BasicType.time, + BasicType.date ]: + return True + # Equal and unequal are the only operators allowed for all other types + if self.value in [ EvalCriteriaOperator.equal, + EvalCriteriaOperator.notEqual ]: + return True + # Not allowed + return False class EvalMode(ACMEIntEnum): - """ Eval Mode enum values. """ + """ Eval Mode enum values. + """ off = 0 """ Evaluation off. """ - once = 1 """ Evaluation once. """ - periodic = 2 """ Evaluation periodic. """ - continous = 3 """ Evaluation continous. """ @@ -791,7 +857,8 @@ class EvalMode(ACMEIntEnum): # class Permission(ACMEIntEnum): - """ Permissions """ + """ Permissions. 
+ """ NONE = 0 """ No permission """ CREATE = 1 @@ -836,84 +903,6 @@ def fromBitfield(cls, bitfield:int) -> List[Permission]: if bitfield == Permission.ALL.value: return [ Permission.ALL ] return [ p for p in Permission if p != Permission.ALL and p & bitfield ] - - -# #/usr/local/bin/python3 acop.py {query} -# import sys - -# operations = [ -# (32, 'DISCOVERY', 'i'), -# (16, 'NOTIFY', 'n'), -# ( 8, 'DELETE', 'd'), -# ( 4, 'UPDATE', 'u'), -# ( 2, 'RETRIEVE', 'r'), -# ( 1, 'CREATE', 'c') -# ] - -# def bitfield(n, length = 6): -# r = [int(digit) for digit in bin(n)[2:]] -# while len(r) < length: -# r.insert(0, 0) -# return r - - -# def opsBitfield(field): -# sm = [] -# for i in range(len(field)-1, -1, -1): -# if field[i]: -# sm.append(operations[i][1]) -# return ', '.join(sm) - - -# def toBitfield(query): -# r = [] -# for each in query.lower(): -# for op in operations: -# if each == op[2]: -# if op[0] not in r: -# r.append(op[0]) -# break # break for if found -# else: -# return -1 # return error if for did not exit - -# return sum(r) - - - - -# qu = sys.argv[1] -# try: -# query = int(qu) - -# except ValueError: -# # Not a number, so try to calculate the reverse -# result = toBitfield(qu) -# if result > 0: -# result = str(result) -# print('') -# #print('Access Control Operations') -# print('' + qu + ' = ' + result + '') -# print('' + result + '') -# print('' + result + '') -# print('') - -# else: -# error() - -# else: -# # If no exception, ie. query is an integer -# if 0 < query < 64: -# result = 'ALL' if query == 63 else opsBitfield(bitfield(query)) -# print('') -# #print('Access Control Operations') -# print('' + qu + ' = ' + result + '') -# print('' + result + '') -# print('' + result + '') -# print('') -# else: -# error() - - ############################################################################## @@ -978,17 +967,29 @@ def toOperation(cls, v:Optional[int]) -> Optional[Operation]: class ResultContentType(ACMEIntEnum): """ Result Content Types """ nothing = 0 + """ Nothing. """ attributes = 1 + """ Resource Attributes. """ hierarchicalAddress = 2 + """ Hierarchical Address. """ hierarchicalAddressAttributes = 3 + """ Hierarchical Address and Attributes. """ attributesAndChildResources = 4 + """ Attributes and Child Resources. """ attributesAndChildResourceReferences = 5 + """ Attributes and Child Resource References. """ childResourceReferences = 6 + """ Child Resource References. """ originalResource = 7 + """ Original Resource. """ childResources = 8 + """ Child Resources. """ modifiedAttributes = 9 + """ Modified Attributes. """ semanticContent = 10 + """ Semantic Content. """ discoveryResultReferences = 11 + """ Discovery Result References. """ def validForOperation(self, op:Operation) -> bool: @@ -1003,6 +1004,14 @@ def validForOperation(self, op:Operation) -> bool: @classmethod def default(cls, op:Operation) -> ResultContentType: + """ Get the default Result Content for an operation. + + Args: + op: The operation to get the default Result Content for. + + Return: + The default Result Content for the operation. 
+ """ return _ResultContentTypeDefaults[op] @@ -1033,6 +1042,7 @@ def default(cls, op:Operation) -> ResultContentType: ResultContentType.childResourceReferences ], Operation.NOTIFY: [ ResultContentType.nothing ], } +""" Mappings between request operations and allowed Result Content """ _ResultContentTypeDefaults = { Operation.RETRIEVE: ResultContentType.attributes, @@ -1042,6 +1052,7 @@ def default(cls, op:Operation) -> ResultContentType: Operation.DELETE: ResultContentType.nothing, Operation.NOTIFY: None, } +""" Mappings between request operations and default Result Content """ # ResultContentType.discoveryRCN = [ ResultContentType.discoveryResultReferences, # type: ignore @@ -1050,22 +1061,31 @@ def default(cls, op:Operation) -> ResultContentType: class FilterOperation(ACMEIntEnum): """ Filter Operation """ AND = 1 # default + """ AND. The default. """ OR = 2 + """ OR. """ XOR = 3 + """ XOR. """ class FilterUsage(ACMEIntEnum): """ Filter Usage """ discoveryCriteria = 1 + """ Discovery Criteria. """ conditionalRetrieval = 2 # default + """ Conditional Retrieval. The default. """ ipeOnDemandDiscovery = 3 + """ IPE On-Demand Discovery. """ discoveryBasedOperation = 4 + """ Discovery Based Operation. """ class DesiredIdentifierResultType(ACMEIntEnum): """ Desired Identifier Result Type """ structured = 1 # default + """ Structured. """ unstructured = 2 + """ Unstructured. """ ############################################################################## @@ -1076,17 +1096,25 @@ class DesiredIdentifierResultType(ACMEIntEnum): class CSEType(ACMEIntEnum): """ CSE Types """ IN = 1 + """ Infrastructure Node. """ MN = 2 + """ Middle Node. """ ASN = 3 + """ Access Node. """ class CSEStatus(ACMEIntEnum): """ CSE Status """ STOPPED = auto() + """ CSE is stopped. """ STARTING = auto() + """ CSE is starting. """ RUNNING = auto() + """ CSE is running. """ STOPPING = auto() + """ CSE is stopping. """ RESETTING = auto() + """ CSE is resetting. """ ############################################################################## # @@ -1119,10 +1147,15 @@ class ResponseType(ACMEIntEnum): class RequestStatus(ACMEIntEnum): """ Reponse Types """ COMPLETED = 1 + """ Completed. """ FAILED = 2 + """ Failed. """ PENDING = 3 + """ Pending. """ FORWARDED = 4 + """ Forwarded. """ PARTIALLY_COMPLETED = 5 + """ Partially completed. """ ############################################################################## @@ -1131,10 +1164,13 @@ class RequestStatus(ACMEIntEnum): # class EventCategory(ACMEIntEnum): - """ Event Categories """ + """ Event Categories from m2m:stdEventCats """ Immediate = 2 + """ Immediate event. """ BestEffort = 3 + """ Best effort event. """ Latest = 4 + """ Only latest event. """ ############################################################################## @@ -1147,65 +1183,101 @@ class ContentSerializationType(ACMEIntEnum): """ XML = auto() + """ XML. """ JSON = auto() + """ JSON. """ CBOR = auto() + """ CBOR. """ PLAIN = auto() - NA = auto() + """ Plain text. """ UNKNOWN = auto() + """ Unknown. """ def toHeader(self) -> str: - """ Return the mime header for a enum value. + """ Return the mime header for an enum value. + + Return: + The mime header for an enum value. 
""" - if self.value == self.JSON: return 'application/json' - if self.value == self.CBOR: return 'application/cbor' - if self.value == self.XML: return 'application/xml' - return None + match self.value: + case self.JSON: + return 'application/json' + case self.CBOR: + return 'application/cbor' + case self.XML: + return 'application/xml' + case _: + return None + def toSimple(self) -> str: - """ Return the simple string for a enum value. + """ Return the simple string for an enum value. + + Return: + The simple string for an enum value. """ - if self.value == self.JSON: return 'json' - if self.value == self.CBOR: return 'cbor' - if self.value == self.XML: return 'xml' - return None + match self.value: + case self.JSON: + return 'json' + case self.CBOR: + return 'cbor' + case self.XML: + return 'xml' + case _: + return None + @classmethod def toContentSerialization(cls, t:str) -> ContentSerializationType: - """ Return the enum from a string. + """ Return the enum from a string for a content serialization. + + Args: + t: String to convert. + + Return: + The enum value. """ - t = t.lower() - if t in [ 'cbor', 'application/cbor' ]: return cls.CBOR - if t in [ 'json', 'application/json' ]: return cls.JSON - if t in [ 'xml', 'application/xml' ]: return cls.XML - return cls.UNKNOWN + match t.lower(): + case 'json' | 'application/json': + return cls.JSON + case 'cbor' | 'application/cbor': + return cls.CBOR + case 'xml' | 'application/xml': + return cls.XML + case _: + return cls.UNKNOWN @classmethod - def getType(cls, hdr:str, default:Optional[ContentSerializationType] = None) -> ContentSerializationType: - """ Return the enum from a header definition. - """ - default = cls.UNKNOWN if not default else default - if not hdr: return default - hdr = hdr.lower() + def getType(cls, t:str, default:Optional[ContentSerializationType] = None) -> ContentSerializationType: + """ Return the enum from a content-type header definition. - if hdr.lower() == 'json': return cls.JSON - if hdr.lower().startswith('application/json'): return cls.JSON - if hdr.lower().startswith('application/vnd.onem2m-res+json'): return cls.JSON - - if hdr.lower() == 'cbor': return cls.CBOR - if hdr.lower().startswith('application/cbor'): return cls.CBOR - if hdr.lower().startswith('application/vnd.onem2m-res+cbor'): return cls.CBOR - - if hdr.lower() == 'xml': return cls.XML - if hdr.lower().startswith('application/xml'): return cls.XML - if hdr.lower().startswith('application/vnd.onem2m-res+XML'): return cls.XML + Args: + t: String to convert. + default: Default value to return if the string is not a valid content-type. - return cls.UNKNOWN + Return: + The enum value. + """ + if not t: + return cls.UNKNOWN if not default else default + match t.lower(): + case 'json' | 'application/json' | 'application/vnd.onem2m-res+json': + return cls.JSON + case 'cbor' | 'application/cbor' | 'application/vnd.onem2m-res+cbor': + return cls.CBOR + case 'xml' | 'application/xml' | 'application/vnd.onem2m-res+xml': + return cls.XML + case _: + return cls.UNKNOWN @classmethod def supportedContentSerializations(cls) -> list[str]: """ Return a list of supported media types for content serialization. + + Return: + A list of supported media types for content serialization. 
""" return [ 'application/json', 'application/vnd.onem2m-res+json', @@ -1217,11 +1289,22 @@ def supportedContentSerializations(cls) -> list[str]: def supportedContentSerializationsSimple(cls) -> list[str]: """ Return a simplified (only the names of the serializations) list of supported media types for content serialization. + + Return: + A list of supported media types for content serialization. """ return [ cls.JSON.toSimple(), cls.CBOR.toSimple() ] def __eq__(self, other:object) -> bool: + """ Compare two ContentSerializationType enums for equality. + + Args: + other: The other enum to compare with. + + Return: + True if the enums are equal. + """ if not isinstance(other, str): return NotImplemented return self.value == self.getType(str(other)) @@ -1235,8 +1318,11 @@ def __eq__(self, other:object) -> bool: class ConsistencyStrategy(ACMEIntEnum): """ Consistency Strategy """ abandonMember = 1 # default + """ Abandon member. The default. """ abandonGroup = 2 + """ Abandon group. """ setMixed = 3 + """ Set mixed. """ ############################################################################## @@ -1247,25 +1333,41 @@ class ConsistencyStrategy(ACMEIntEnum): class NotificationContentType(ACMEIntEnum): """ Notification Content Types """ allAttributes = 1 + """ All Attributes. """ modifiedAttributes = 2 + """ Modified Attributes. """ ri = 3 + """ Resource Identifier. """ triggerPayload = 4 + """ Trigger Payload. """ timeSeriesNotification = 5 + """ Time Series Notification. """ class NotificationEventType(ACMEIntEnum): """ eventNotificationCriteria/NotificationEventTypes """ - resourceUpdate = 1 # A, default - resourceDelete = 2 # B + + resourceUpdate = 1 # A, default + """ Resource Update (the default).""" + resourceDelete = 2 # B + """ Resource Delete. """ createDirectChild = 3 # C + """ Create Direct Child. """ deleteDirectChild = 4 # D + """ Delete Direct Child. """ retrieveCNTNoChild = 5 # E # TODO not supported yet + """ Retrieve CNT No Child. """ triggerReceivedForAE = 6 # F # TODO not supported yet + """ Trigger Received For AE. """ blockingUpdate = 7 # G + """ Blocking Update. """ # TODO spec and implementation for blockingUpdateDirectChild = ??? reportOnGeneratedMissingDataPoints = 8 # H + """ Report On Generated Missing Data Points. """ blockingRetrieve = 9 # I # EXPERIMENTAL + """ Blocking Retrieve. """ blockingRetrieveDirectChild = 10 # J # EXPERIMENTAL + """ Blocking Retrieve Direct Child. """ def isAllowedNCT(self, nct:NotificationContentType) -> bool: @@ -1277,24 +1379,27 @@ def isAllowedNCT(self, nct:NotificationContentType) -> bool: Return: True if the NotificationEventType is allowed for the NotificationContentType. 
""" - if nct == NotificationContentType.allAttributes: - return self.value in [ NotificationEventType.resourceUpdate, - NotificationEventType.resourceDelete, - NotificationEventType.createDirectChild, - NotificationEventType.deleteDirectChild ] - elif nct == NotificationContentType.modifiedAttributes: - return self.value in [ NotificationEventType.resourceUpdate, - NotificationEventType.blockingUpdate ] - elif nct == NotificationContentType.ri: - return self.value in [ NotificationEventType.resourceUpdate, - NotificationEventType.resourceDelete, - NotificationEventType.createDirectChild, - NotificationEventType.deleteDirectChild ] - elif nct == NotificationContentType.triggerPayload: - return self.value in [ NotificationEventType.triggerReceivedForAE ] - elif nct == NotificationContentType.timeSeriesNotification: - return self.value in [ NotificationEventType.reportOnGeneratedMissingDataPoints ] - return False + match nct: + case NotificationContentType.allAttributes: + return self.value in [ NotificationEventType.resourceUpdate, + NotificationEventType.resourceDelete, + NotificationEventType.createDirectChild, + NotificationEventType.deleteDirectChild ] + case NotificationContentType.modifiedAttributes: + return self.value in [ NotificationEventType.resourceUpdate, + NotificationEventType.blockingUpdate ] + case NotificationContentType.ri: + return self.value in [ NotificationEventType.resourceUpdate, + NotificationEventType.resourceDelete, + NotificationEventType.createDirectChild, + NotificationEventType.deleteDirectChild ] + case NotificationContentType.triggerPayload: + return self.value in [ NotificationEventType.triggerReceivedForAE ] + case NotificationContentType.timeSeriesNotification: + return self.value in [ NotificationEventType.reportOnGeneratedMissingDataPoints ] + case _: + return False + def defaultNCT(self) -> NotificationContentType: """ Return the default NotificationContentType for this NotificationEventType. @@ -1315,6 +1420,7 @@ def defaultNCT(self) -> NotificationContentType: NotificationEventType.blockingUpdate: NotificationContentType.modifiedAttributes, NotificationEventType.reportOnGeneratedMissingDataPoints: NotificationContentType.timeSeriesNotification } +""" Mappings between NotificationEventType and default NotificationContentType """ ############################################################################## # @@ -1324,24 +1430,42 @@ def defaultNCT(self) -> NotificationContentType: @dataclass class MissingData: """ Data class for collecting the missing data states. """ + subscriptionRi:str + """ Subscription resource identifier. """ missingDataDuration:float + """ Missing data duration. """ missingDataNumber:int + """ Missing data number. """ timeWindowEndTimestamp:float = None + """ Time window end timestamp. """ missingDataList:list[str] = field(default_factory=list) + """ Missing data list. """ missingDataCurrentNr:int = 0 + """ Missing data current number. """ def clear(self) -> None: + """ Clear the missing data states. + """ + self.timeWindowEndTimestamp = None self.clearMissingDataList() def clearMissingDataList(self) -> None: + """ Clear the missing data list. + """ + self.missingDataList = [] self.missingDataCurrentNr = 0 def asDict(self) -> JSON: + """ Return the missing data as a dictionary. + + Return: + The missing data as a dictionary. 
+ """ return { 'mdlt': self.missingDataList, 'mdc' : self.missingDataCurrentNr @@ -1355,20 +1479,29 @@ class LastTSInstance: # runtime attributes dgt:list[float] = field(default_factory = lambda: [0]) + """ List of data generation times. """ expectedDgt:float = 0.0 + """ Expected data generation time. """ missingDataDetectionTime:float = 0.0 + """ Missing data detection time. """ # attributes pei:float = 0.0 + """ Periodic interval. """ mdt:float = 0.0 + """ Missing data detection time. """ peid:float = 0.0 + """ Periodic interval duration. """ # Subscriptions missingData:dict[str, MissingData] = field(default_factory = dict) + """ Missing data. """ # Internal actor:BackgroundWorker = None #type:ignore[name-defined] # actor for this TS + """ Actor for this TS.""" running:bool = False # for late activation of this + """ Running. """ def prepareNextDgt(self) -> None: @@ -1384,6 +1517,11 @@ def prepareNextRun(self) -> None: def addDgt(self, dgt:float) -> None: + """ Add a data generation time to the list of data generation times. + + Args: + dgt: The data generation time to add. + """ # TODO really support list. currently only one dgt is put, but # always overrides the old one. # Also change declaration of dgt above @@ -1394,16 +1532,31 @@ def addDgt(self, dgt:float) -> None: def nextDgt(self) -> float: + """ Get the next expected data generation time. + + Return: + The next expected data generation time. + """ if len(self.dgt) == 0: return None return self.dgt.pop(0) def hasDgt(self) -> bool: + """ Check if there is a data generation time. + + Return: + True if there is a data generation time. + """ return len(self.dgt) > 0 def clearDgt(self) -> None: + """ Clear the data generation time. + + Return: + True if there is a data generation time. + """ self.dgt.clear() @@ -1415,6 +1568,7 @@ def clearDgt(self) -> None: class AnnounceSyncType(ACMEIntEnum): """ Announce Sync Types """ + UNI_DIRECTIONAL = 1 """ Announcement shall be done uni-directional, ie. changes in the announced resource are not synced back.""" BI_DIRECTIONAL = 2 @@ -1537,9 +1691,92 @@ class SemanticFormat(ACMEIntEnum): SemanticFormat.FF_RdfTurtle: 'ttl', SemanticFormat.FF_Manchester: 'manchester', SemanticFormat.FF_JsonLD: 'json-ld', +} +""" Mappings between semantic formats and strings representations. """ + + +############################################################################## +# +# LocationPolicy and GeoQuery related +# + +class LocationSource(ACMEIntEnum): + """ Location Source. + """ + Network_based = 1 + """ Network based. """ + Device_based = 2 + """ Device based. """ + Sharing_based = 3 + """ Sharing based. """ + + +class GeofenceEventCriteria(ACMEIntEnum): + """ Geofence Event Criteria. + """ + + Entering = 1 + """ Entering. """ + Leaving = 2 + """ Leaving. """ + Inside = 3 + """ Inside. """ + Outside = 4 + """ Outside. """ + + +class LocationUpdateEventCriteria(ACMEIntEnum): + """ Location Update Event Criteria. + """ + + Location_Change = 0 + """ Location Change. """ + + +class LocationInformationType(ACMEIntEnum): + """ Location Information Type. + """ + + Position_fix = 1 + """ Position fix. """ + Geofence_event = 2 + """ Geofence event. """ + + +class GeometryType(ACMEIntEnum): + """ Geometry Type. + """ + Point = 1 + """ Point.""" + LineString = 2 + """ LineString. """ + Polygon = 3 + """ Polygon. """ + MultiPoint = 4 + """ MultiPoint. """ + MultiLineString = 5 + """ MultiLineString. """ + MultiPolygon = 6 + """ MultiPolygon. 
""" + +Coordinate = Tuple[float, float, Optional[float]] +""" Coordinate type. """ +ListOfCoordinates = list[Coordinate] +""" List of coordinates type. """ + + +class GeoSpatialFunctionType(ACMEIntEnum): + """ Geo Spatial Function Type. + """ + Within = 1 + """ Within.""" + Contains = 2 + """ Contains.""" + Intersects = 3 + """ Intersects.""" + -} ############################################################################## # @@ -1553,11 +1790,17 @@ class Result: the general result, a status code, values, resources etc. """ resource:Resource = None # type: ignore # Actually this is a Resource type, but have a circular import problem. + """ Resource instance. """ data:Any|Sequence[Any]|Tuple|JSON|str = None # Anything, or list of anything, or a JSON dictionary + """ Data. """ rsc:ResponseStatusCode = ResponseStatusCode.UNKNOWN # The responseStatusCode of a Result + """ ResponseStatusCode. """ dbg:Optional[str] = None + """ Optional debug message. """ request:Optional[CSERequest] = None # may contain the processed incoming request object + """ Optional `CSERequest`. """ embeddedRequest:Optional[CSERequest] = None # May contain a request as a response, e.g. when polling + """ Optional embedded `CSERequest`. """ # def errorResultCopy(self) -> Result: @@ -1588,6 +1831,14 @@ class Result: def toData(self, ct:Optional[ContentSerializationType] = None) -> str|bytes|JSON: + """ Return the result data as a string or bytes or JSON. + + Args: + ct: The content serialization type to use. If not given, the default serialization type is used. + + Return: + The result data as a string or bytes or JSON. + """ from ..resources.Resource import Resource from ..etc.RequestUtils import serializeData from ..services.CSE import defaultSerialization @@ -1736,7 +1987,20 @@ class FilterCriteria: lbl:list = None """ List of labels. Default is *None*. """ - aq:str = None + gmty:GeometryType = None + """ geometryType for geo-query. Default is *None*. """ + + geom:str = None + """ geometry for geo-query. Default is *None*. """ + + _geom:list = None + """ Internal attribute to hold a parsed geometry. Default is *None*.""" + + gsf:GeoSpatialFunctionType = None + """ geoSpatialFunction for geo-query. Default is *None*. """ + + + aq:str = None # EXPERIMENTAL """ Advanced query. Default is *None*. """ @@ -1766,7 +2030,7 @@ def criteriaAttributes(self) -> dict: """ return { k:v for k, v in self.__dict__.items() - if k is not None and k not in [ 'fu', 'fo', 'lim', 'ofst', 'lvl', 'arp', 'attributes' ] and v is not None + if k is not None and k not in ( 'fu', 'fo', 'lim', 'ofst', 'lvl', 'arp', 'attributes', 'gmty', 'geom', '_geom', 'gsf' ) and v is not None } @@ -1790,6 +2054,8 @@ def _fill(k:str, v:Any) -> None: return if k == 'fo' and int(v) == FilterOperation.AND: return + if k.startswith('_'): # internal attributes + return result[k] = v self.mapAttributes(_fill, False) @@ -1927,15 +2193,18 @@ def __post_init__(self) -> None: _rqetUTCts:float = None # X-M2M-RET as UTC based timestamp """ Request Expiration Timestamp as UTC-based timestamp (internal). """ + rset:str = None + """ Result Expiration Time in ISO8901 format or as ms (X-M2M-RST). """ + + _rsetUTCts:float = None # X-M2M-RET as UTC based timestamp + """ Result Expiration Timestamp as UTC-based timestamp (internal). """ + ot:str = None """ Originating Timestamp in ISO8901 format. """ oet:str = None """ Operation Execution Time in ISO8901 format or as ms (X-M2M-OET). """ - rset:str = None - """ Result Expiration Time in ISO8901 format or as ms (X-M2M-RST). 
""" - rtu:list[str] = None """ The notificationURI element of the Response Type parameter(X-M2M-RTU). """ @@ -1979,9 +2248,6 @@ def __post_init__(self) -> None: httpAccept:list[str] = None """ http Accept header media type. """ - originalHttpArgs:Any = None - """ Original http request arguments. A MultiDict. """ - # # Helpers # @@ -2036,8 +2302,6 @@ def fillOriginalRequest(self, update:bool = False) -> None: self.originalRequest['rset'] = self.rset if self.rtu: self.originalRequest['rtu'] = self.rtu - if self.rset: - self.originalRequest['rset'] = self.rset # TODO is the content serialization type necessary to store? An "ct" is not the right shortname # if self.ct: # self.originalRequest['ct'] = self.ct @@ -2082,27 +2346,49 @@ def convertToR1Target(self, targetRvi:str) -> CSERequest: @dataclass class AttributePolicy: + """ Attribute policy for a single resource attribute. + """ # !!! DON'T CHANGE the order of the attributes! type:BasicType + """ Type of the attribute. """ cardinality:Cardinality + """ Cardinality of the attribute. """ optionalCreate:RequestOptionality + """ Optionality of the attribute for create requests. """ optionalUpdate:RequestOptionality + """ Optionality of the attribute for update requests. """ optionalDiscovery:RequestOptionality + """ Optionality of the attribute for discovery requests. """ announcement:Announced + """ Whether the attribute is announced. """ sname:str = None # short name + """ Short name of the attribute. """ lname:str = None # longname + """ Long name of the attribute. """ namespace:str = None # namespace + """ Namespace of the attribute. """ tpe:str = None # namespace:type name + """ Type name of the attribute. """ rtypes:List[ResourceTypes] = None # Optional list of multiple resourceTypes + """ List of resource types that this attribute is valid for. """ ctype:str = None # Definition for a complex type attribute + """ Definition name for a complex type attribute. """ typeName:str = None # The type as written in the definition + """ The type as written in the definition. """ fname:str = None # Name of the definition file + """ Name of the definition file. """ ltype:BasicType = None # sub-type of a list + """ Sub-type of a list as writen in the definition. """ + etype:str = None # name of the enum type + """ Name of the enum type (if the attribute is of type *enum*). """ lTypeName:str = None # sub-type of a list as writen in the definition - evalues:list[Any] = None # List of enum values + """ Sub-type of a list as writen in the definition. """ + evalues:dict[int, str] = None # Dict of enum values and interpretations + """ Dict of enum values and interpretations. """ ptype:Type = None # Implementation type of the enum values + """ Implementation type of the enum values. """ # TODO support annnouncedSyncType @@ -2120,12 +2406,17 @@ def select(self, index:int) -> Optional[Any]: return None -""" Represent a dictionary of attribute policies used in validation. """ AttributePolicyDict = Dict[str, AttributePolicy] +""" Represent a dictionary of attribute policies used in validation. """ + ResourceAttributePolicyDict = Dict[Tuple[Union[ResourceTypes, str], str], AttributePolicy] +""" Represent a dictionary of attribute policies used in validation. """ FlexContainerAttributes = Dict[str, Dict[str, AttributePolicy]] +""" Type definition for a dictionary of attribute policies for a flexContainer. """ + FlexContainerSpecializations = Dict[str, str] +""" Type definition for a dictionary of specializations for a flexContainer. 
""" ############################################################################## @@ -2135,17 +2426,23 @@ def select(self, index:int) -> Optional[Any]: Parameters = Dict[str, str] -Attributes = Dict[str, Any] +""" Type definition for a dictionary of parameters. """ JSON = Dict[str, Any] +""" Type definition for a JSON type, which is just a dictionary. """ JSONLIST = List[JSON] +""" Type definition for a list of JSON types. """ ReqResp = Dict[str, Union[int, str, List[str], JSON]] +""" Type definition for a dictionary of request/response parameters. """ RequestCallback = namedtuple('RequestCallback', 'ownRequest dispatcherRequest sendRequest httpEvent mqttEvent') +""" Type definition for a callback function to handle outgoing requests. """ RequestHandler = Dict[Operation, RequestCallback] """ Type definition for a map between operations and handler for outgoing request operations. """ RequestResponse = namedtuple('RequestResponse', 'request result') +""" Type definition for a request/response pair. """ RequestResponseList = List[RequestResponse] +""" Type definition for a list of request/response pairs. """ FactoryCallableT = Callable[ [ Dict[str, object], str, str, bool], object ] """ Type definition for a factory callback to create and initializy a Resource instance. """ \ No newline at end of file diff --git a/acme/etc/Utils.py b/acme/etc/Utils.py index d446100e..05f1538a 100644 --- a/acme/etc/Utils.py +++ b/acme/etc/Utils.py @@ -107,6 +107,8 @@ def noNamespace(id:str) -> str: _randomIDCharSet = string.ascii_uppercase + string.digits + string.ascii_lowercase +""" Character set for random IDs. """ + def _randomID() -> str: """ Generate an ID. Prevent certain patterns in the ID. @@ -152,7 +154,7 @@ def isCSERelative(uri:str) -> bool: return uri is not None and not uri.startswith('/') -def isStructured(uri:str) -> bool: +def isStructured(uri:str) -> bool: # type: ignore[return] """ Test whether a URI is in structured format. Args: @@ -160,16 +162,18 @@ def isStructured(uri:str) -> bool: Return: Boolean if the URI is in structured format """ - if isCSERelative(uri): - return '/' in uri or uri == CSE.cseRn - elif isSPRelative(uri): - return uri.count('/') > 2 - elif isAbsolute(uri): - return uri.count('/') > 4 - return False + match uri: + case x if isCSERelative(uri): + return '/' in uri or uri == CSE.cseRn + case x if isSPRelative(uri): + return uri.count('/') > 2 + case x if isAbsolute(uri): + return uri.count('/') > 4 + case _: + return False -def localResourceID(ri:str) -> Optional[str]: +def localResourceID(ri:str) -> Optional[str]: # type: ignore[return] """ Test whether an ID is a resource ID of the local CSE. Args: @@ -197,16 +201,19 @@ def _checkDash(ri:str) -> str: if ri == CSE.cseCsi: return CSE.cseRn - if isAbsolute(ri): - if ri.startswith(CSE.cseAbsoluteSlash): - return _checkDash(ri[len(CSE.cseAbsoluteSlash):]) - return None - elif isSPRelative(ri): - if ri.startswith(CSE.cseCsiSlash): - return _checkDash(ri[len(CSE.cseCsiSlash):]) - return None - return ri + match ri: + case x if isAbsolute(x): + if ri.startswith(CSE.cseAbsoluteSlash): + return _checkDash(ri[len(CSE.cseAbsoluteSlash):]) + return None + case x if isSPRelative(x): + if ri.startswith(CSE.cseCsiSlash): + return _checkDash(ri[len(CSE.cseCsiSlash):]) + return None + case _: + return ri + def isValidID(id:str, allowEmpty:Optional[bool] = False) -> bool: """ Test for a valid ID. 
@@ -223,6 +230,8 @@ def isValidID(id:str, allowEmpty:Optional[bool] = False) -> bool: _unreserved = re.compile(r'^[\w\-.~]*$') +""" Regular expression to test for unreserved characters. """ + def hasOnlyUnreserved(id:str) -> bool: """ Test that an ID only contains characters from the unreserved character set of RFC 3986. @@ -236,6 +245,8 @@ def hasOnlyUnreserved(id:str) -> bool: _csiRx = re.compile('^/[^/\s]+') # Must start with a / and must not contain a further / or white space +""" Regular expression to test for valid CSE-ID format. """ + def isValidCSI(csi:str) -> bool: """ Test for valid CSE-ID format. @@ -248,6 +259,8 @@ def isValidCSI(csi:str) -> bool: _aeRx = re.compile('^[^/\s]+') # Must not start with a / and must not contain a further / or white space +""" Regular expression to test for valid AE-ID format. """ + def isValidAEI(aei:str) -> bool: """ Test for valid AE-ID format. @@ -318,10 +331,11 @@ def csiFromRelativeAbsoluteUnstructured(id:str) -> Tuple[str, list[str]]: Tuple (CSE ID (no leading slashes) without any SP-ID or CSE-ID, list of path elements) """ ids = id.split('/') - if isSPRelative(id): - return ids[1], ids - elif isAbsolute(id): - return ids[3], ids + match id: + case x if isSPRelative(x): + return ids[1], ids + case x if isAbsolute(x): + return ids[3], ids return id, ids @@ -386,67 +400,62 @@ def retrieveIDFromPath(id:str) -> Tuple[str, str, str, str]: vrPresent = ids.pop() # remove and return last path element idsLen -= 1 - # CSE-Relative (first element is not /) - if lvl == 0: - # L.logDebug("CSE-Relative") - if idsLen == 1 and ((ids[0] != CSE.cseRn and ids[0] != '-') or ids[0] == CSE.cseCsiSlashLess): # unstructured - ri = ids[0] - else: # structured - if ids[0] == '-': # replace placeholder "-". Always convert in CSE-relative - ids[0] = CSE.cseRn - srn = '/'.join(ids) - - # SP-Relative (first element is /) - elif lvl == 1: - # L.logDebug("SP-Relative") - if idsLen < 2: - return None, None, None, f'ID too short: {id}. Must be //.' - csi = ids[0] # extract the csi - if csi != CSE.cseCsiSlashLess: # Not for this CSE? retargeting - if vrPresent: # append last path element again - ids.append(vrPresent) - return id, csi, srn, None # Early return. ri is the (un)structured path - # if idsLen == 1: - # # ri = ids[0] - # return None, None, None, 'ID too short' - #elif idsLen > 1: + match lvl: + + # CSE-Relative (first element is not /) + case 0: + if idsLen == 1 and ((ids[0] != CSE.cseRn and ids[0] != '-') or ids[0] == CSE.cseCsiSlashLess): # unstructured + ri = ids[0] + else: # structured + if ids[0] == '-': # replace placeholder "-". Always convert in CSE-relative + ids[0] = CSE.cseRn + srn = '/'.join(ids) + + # SP-Relative (first element is /) + case 1: + # L.logDebug("SP-Relative") + if idsLen < 2: + return None, None, None, f'ID too short: {id}. Must be //.' + csi = ids[0] # extract the csi + if csi != CSE.cseCsiSlashLess: # Not for this CSE? retargeting + if vrPresent: # append last path element again + ids.append(vrPresent) + return id, csi, srn, None # Early return. 
ri is the (un)structured path - # replace placeholder "-", convert in CSE-relative when the target is this CSE - if ids[1] == '-' and ids[0] == CSE.cseCsiSlashLess: - ids[1] = CSE.cseRn - if ids[1] == CSE.cseRn: # structured - srn = '/'.join(ids[1:]) # remove the csi part - elif idsLen == 2: # unstructured - ri = ids[1] - else: - return None, None, None, 'Too many "/" level' - - # Absolute (2 first elements are /) - elif lvl == 2: - # L.logDebug("Absolute") - if idsLen < 3: - return None, None, None, 'ID too short. Must be ////.' - spi = ids[0] - csi = ids[1] - if spi != CSE.cseSpid: # Check for SP-ID - return None, None, None, f'SP-ID: {CSE.cseSpid} does not match the request\'s target ID SP-ID: {spi}' - if csi != CSE.cseCsiSlashLess: # Check for CSE-ID - if vrPresent: # append virtual last path element again - ids.append(vrPresent) - return id, csi, srn, None # Not for this CSE? retargeting - # if idsLen == 2: - # ri = ids[1] - # elif idsLen > 2: - - # replace placeholder "-", convert in absolute when the target is this CSE - if ids[2] == '-' and ids[1] == CSE.cseCsiSlashLess: - ids[2] = CSE.cseRn - if ids[2] == CSE.cseRn: # structured - srn = '/'.join(ids[2:]) - elif idsLen == 3: # unstructured - ri = ids[2] - else: - return None, None, None, 'Too many "/" level' + # replace placeholder "-", convert in CSE-relative when the target is this CSE + if ids[1] == '-' and ids[0] == CSE.cseCsiSlashLess: + ids[1] = CSE.cseRn + if ids[1] == CSE.cseRn: # structured + srn = '/'.join(ids[1:]) # remove the csi part + elif idsLen == 2: # unstructured + ri = ids[1] + else: + return None, None, None, 'Too many "/" level' + + + # Absolute (2 first elements are /) + case 2: + # L.logDebug("Absolute") + if idsLen < 3: + return None, None, None, 'ID too short. Must be ////.' + spi = ids[0] + csi = ids[1] + if spi != CSE.cseSpid: # Check for SP-ID + return None, None, None, f'SP-ID: {CSE.cseSpid} does not match the request\'s target ID SP-ID: {spi}' + if csi != CSE.cseCsiSlashLess: # Check for CSE-ID + if vrPresent: # append virtual last path element again + ids.append(vrPresent) + return id, csi, srn, None # Not for this CSE? retargeting + + # replace placeholder "-", convert in absolute when the target is this CSE + if ids[2] == '-' and ids[1] == CSE.cseCsiSlashLess: + ids[2] = CSE.cseRn + if ids[2] == CSE.cseRn: # structured + srn = '/'.join(ids[2:]) + elif idsLen == 3: # unstructured + ri = ids[2] + else: + return None, None, None, 'Too many "/" level' # Now either csi, ri or structured srn is set if ri: @@ -578,6 +587,8 @@ def riFromID(id:str) -> str: # r'\S+' # re.IGNORECASE # optional path ) +""" Regular expression to test for a valid URL. """ + def isURL(url:str) -> bool: """ Check whether a given string is a URL. @@ -642,7 +653,10 @@ def normalizeURL(url:str) -> str: # _excludeFromRoot = [ 'pi' ] +""" Attributes that are excluded from the root of a resource tree. """ + _pureResourceRegex = re.compile('[\w]+:[\w]') +""" Regular expression to test for a pure resource name. """ def pureResource(dct:JSON) -> Tuple[JSON, str, str]: """ Return the "pure" structure without the ":xxx" resource type name, and the oneM2M type identifier. @@ -739,6 +753,15 @@ def resourceModifiedAttributes(old:JSON, new:JSON, requestPC:JSON, modifiers:Opt def filterAttributes(dct:JSON, attributesToInclude:JSON) -> JSON: + """ Filter a dictionary by a list of attributes to include. + + Args: + dct: Dictionary to filter. + attributesToInclude: List of attributes to include. + + Return: + Filtered dictionary. 
+ """ return { k: v for k, v in dct.items() if k in attributesToInclude } @@ -766,22 +789,25 @@ def getAttributeSize(attribute:Any) -> int: Byte size of the attribute's value. """ size = 0 - if isinstance(attribute, str): - size = len(attribute) - elif isinstance(attribute, int): - size = 4 - elif isinstance(attribute, float): - size = 8 - elif isinstance(attribute, bool): - size = 1 - elif isinstance(attribute, list): # recurse a list - for e in attribute: - size += getAttributeSize(e) - elif isinstance(attribute, dict): # recurse a dictionary - for _,v in attribute: - size += getAttributeSize(v) - else: - size = sys.getsizeof(attribute) # fallback for not handled types + + match attribute: + case str(): + size = len(attribute) + case int(): + size = 4 + case float(): + size = 8 + case bool(): + size = 1 + case list(): # recurse a list + for e in attribute: + size += getAttributeSize(e) + case dict(): # recurse a dictionary + for _,v in attribute.items(): + size += getAttributeSize(v) + case _: # fallback for not handled types + size = sys.getsizeof(attribute) + return size @@ -874,5 +900,12 @@ def runsInIPython() -> bool: def reverseEnumerate(data:list) -> Generator[Tuple[int, Any], None, None]: + """ Reverse enumerate a list. + + Args: + data: List to enumerate. + Return: + Generator that yields a tuple with the index and the value of the list. + """ for i in range(len(data)-1, -1, -1): yield (i, data[i]) \ No newline at end of file diff --git a/acme/helpers/ACMEIntEnum.py b/acme/helpers/ACMEIntEnum.py index d5651b27..ef76dfaf 100644 --- a/acme/helpers/ACMEIntEnum.py +++ b/acme/helpers/ACMEIntEnum.py @@ -90,6 +90,16 @@ def __str__(self) -> str: The name of an enum value. """ return self.name + + + def __int__(self) -> int: + """ Get the integer value of an enum. + + Return: + The value of an enum value. + """ + return self.value + def __repr__(self) -> str: diff --git a/acme/helpers/BackgroundWorker.py b/acme/helpers/BackgroundWorker.py index 1d2ca5d7..35a3f79c 100644 --- a/acme/helpers/BackgroundWorker.py +++ b/acme/helpers/BackgroundWorker.py @@ -13,7 +13,7 @@ from typing import Callable, List, Dict, Any, Tuple, Optional from .TextTools import simpleMatch -import random, sys, heapq, traceback, time +import random, sys, heapq, traceback, time, inspect from datetime import datetime, timezone from threading import Thread, Timer, Event, RLock, Lock, enumerate as threadsEnumerate import logging @@ -30,6 +30,16 @@ def _utcTime() -> float: class BackgroundWorker(object): """ This class provides the functionality for background worker or a single actor instance. + + Background workers are executed in a separate thread. + + They are executed periodically according to the interval. The interval is the time between + the end of the previous execution and the start of the next execution. The interval is usually + not the time betweenthe start of two consecutive executions, but this could be achieved by setting the + *runOnTime* parameter to *True*. This will compensate for the processing time of the + worker callback. + + Background workers can be stopped and started again. They can also be paused and resumed. """ __slots__ = ( @@ -51,6 +61,7 @@ class BackgroundWorker(object): 'data', 'args', ) + """ Slots for the class. """ # Holds a reference to an specific logging function. # This must have the same signature as the `logging.log` method. 
@@ -72,22 +83,56 @@ def __init__(self, finished:Optional[Callable] = None, ignoreException:Optional[bool] = False, data:Optional[Any] = None) -> None: + """ Initialize a background worker. + + Args: + interval: Interval in seconds to run the worker callback. + callback: Callback to run as a worker. + name: Name of the worker. + startWithDelay: If True then start the worker after a `interval` delay. + maxCount: Maximum number runs. + dispose: If True then dispose the worker after finish. + id: Unique ID of the worker. + runOnTime: If True then the worker is always run *at* the interval, otherwise the interval starts *after* the worker execution. + runPastEvents: If True then runs in the past are executed, otherwise they are dismissed. + finished: Callable that is executed after the worker finished. + ignoreException: Restart the actor in case an exception is encountered. + data: Any data structure that is stored in the worker and accessible by the *data* attribute, and which is passed as the first argument in the *_data* argument of the *workerCallback* if not *None*. + """ self.interval = interval + """ Interval in seconds to run the worker callback. """ self.runOnTime = runOnTime # Compensate for processing time + """ If True then the worker is always run *at* the interval, otherwise the interval starts *after* the worker execution. """ self.runPastEvents = runPastEvents # Run events that are in the past + """ If True then missed worker runs in the past are executed, otherwise they are dismissed. """ self.nextRunTime:float = None # Timestamp + """ Timestamp of the next execution. """ self.callback = callback # Actual callback to process + """ Callback function to run as a worker. """ self.running = False # Indicator that a worker is running or will be stopped + """ True if the worker is running. """ self.executing = False # Indicator that the worker callback is currently executed + """ True if the worker is currently executing. """ self.name = name + """ Name of the worker. """ self.startWithDelay = startWithDelay + """ If True then start the worker after a `interval` delay. """ self.maxCount = maxCount # max runs + """ Maximum number runs. """ self.numberOfRuns = 0 # Actual runs + """ Number of runs. """ self.dispose = dispose # Only run once, then remove itself from the pool + """ If True then dispose the worker after finish. """ self.finished = finished # Callback after worker finished + """ Callback that is executed after the worker finished. """ self.ignoreException = ignoreException # Ignore exception when running workers + """ Restart the actor in case an exception is encountered. """ self.id = id + """ Unique ID of the worker. """ self.data = data # Any extra data + """ Any data structure that is stored in the worker and accessible by the *data* attribute, and which is passed as the first argument in the *_data* argument of the *workerCallback* if not *None*. """ + self.args:Dict[str, Any] = {} # Arguments for the callback + """ Arguments for the callback. 
""" @@ -213,10 +258,15 @@ def _work(self) -> None: # - ignoreException is False then the exception is raised again while True: try: - if self.data is not None: - result = self.callback(_data = self.data, **self.args) - else: - result = self.callback(**self.args) + # check whether the callback has a _data and _worker argument + # and add them if they are + argSpec = inspect.getfullargspec(self.callback) + if '_data' in argSpec.args: + self.args['_data'] = self.data + if '_worker' in argSpec.args: + self.args['_worker'] = self + # call the callback + result = self.callback(**self.args) break except Exception as e: if BackgroundWorker._logger: @@ -269,7 +319,12 @@ def _postCall(self) -> None: def __repr__(self) -> str: - return f'BackgroundWorker(name={self.name}, callback = {str(self.callback)}, running = {self.running}, interval = {self.interval:f}, startWithDelay = {self.startWithDelay}, numberOfRuns = {self.numberOfRuns:d}, dispose = {self.dispose}, id = {self.id}, runOnTime = {self.runOnTime})' + """ Return a string representation of the worker. + + Return: + A string representation of the worker. + """ + return f'BackgroundWorker(name={self.name}, callback = {str(self.callback)}, running = {self.running}, interval = {self.interval:f}, startWithDelay = {self.startWithDelay}, numberOfRuns = {self.numberOfRuns:d}, dispose = {self.dispose}, id = {self.id}, runOnTime = {self.runOnTime}, data = {self.data})' @@ -287,18 +342,26 @@ class Job(Thread): 'Callable', 'finished', ) + """ Slots for the class.""" - jobListLock = RLock() # Re-entrent lock (for the same thread) + jobListLock = RLock() + """ Lock for the job lists. """ # Paused and running job lists pausedJobs:list[Job] = [] + """ List of paused jobs. """ runningJobs:list[Job] = [] + """ List of running jobs. """ # Defaults for reducing overhead jobs - _balanceTarget:float = 3.0 # Target balance between paused and running jobs (n paused for 1 running) - _balanceLatency:int = 1000 # Number of requests for getting a new Job before a check - _balanceReduceFactor:float = 2.0 # Factor to reduce the paused jobs (number of paused / balanceReduceFactor) - _balanceCount:int = 0 # Counter for current runs. Compares against balance + _balanceTarget:float = 3.0 + """ Target balance between paused and running jobs (n paused for 1 running). """ + _balanceLatency:int = 1000 + """ Number of requests for getting a new Job before a balance check. """ + _balanceReduceFactor:float = 2.0 + """ Factor to reduce the paused jobs (number of paused / balanceReduceFactor). """ + _balanceCount:int = 0 + """ Counter for current runs. Compares against balance. """ def __init__(self, *args:Any, **kwargs:Any) -> None: @@ -434,6 +497,8 @@ def getJob(cls, task:Callable, finished:Optional[Callable] = None, name:Optional @classmethod def _balanceJobs(cls) -> None: + """ Internal function to balance the number of paused and running jobs. + """ if not Job._balanceLatency: return Job._balanceCount += 1 @@ -461,46 +526,77 @@ def setJobBalance(cls, balanceTarget:Optional[float] = 3.0, class WorkerEntry(object): + """ Internal class for a worker entry in the priority queue. + """ __slots__ = ( 'timestamp', 'workerID', 'workerName', ) - - # timestamp:float = 0.0 - # workerID:int = None - # workerName:str = None + """ Slots for the class. """ def __init__(self, timestamp:float, workerID:int, workerName:str) -> None: + """ Initialize a WorkerEntry. + + Args: + timestamp: Timestamp of the next execution. + workerID: ID of the worker. + workerName: Name of the worker. 
+ """ self.timestamp = timestamp + """ Timestamp of the next execution. """ self.workerID = workerID + """ ID of the worker. """ self.workerName = workerName + """ Name of the worker. """ def __lt__(self, other:WorkerEntry) -> bool: + """ Compare two WorkerEntry objects for less-than. + + Args: + other: The other WorkerEntry object to compare with. + + Return: + True if this WorkerEntry is less than the other. + """ return self.timestamp < other.timestamp def __str__(self) -> str: + """ Return a string representation of the WorkerEntry. + + Return: + A string representation of the WorkerEntry. + """ return f'(ts: {self.timestamp} id: {self.workerID} name: {self.workerName})' def __repr__(self) -> str: + """ Return a string representation of the WorkerEntry. + + Return: + A string representation of the WorkerEntry. + """ return self.__str__() class BackgroundWorkerPool(object): """ Pool and factory for background workers and actors. """ + backgroundWorkers:Dict[int, BackgroundWorker] = {} + """ All background workers. """ workerQueue:list[WorkerEntry] = [] """ Priority queue. Contains tuples (next execution timestamp, worker ID, worker name). """ workerTimer:Timer = None """ A single timer to run the next task in the *workerQueue*. """ queueLock:Lock = Lock() + """ Lock for the *workerQueue*. """ timerLock:Lock = Lock() + """ Lock for the *workerTimer*. """ def __new__(cls, *args:str, **kwargs:str) -> BackgroundWorkerPool: @@ -598,7 +694,7 @@ def newActor(cls, workerCallback:Callable, (it may be 0.0s, though), or *at* a specific time (UTC timestamp). Args: - workerCallback: Callback that is executed to perform the action for the actor. + workerCallback: Callback that is executed to perform the action for the actor. It will receive the *data* in its *_data*, and the worker itself in the *_worker* arguments (if available as arguments). delay: Delay in seconds after which the actor callback is executed. This is an alternative to *at*. Only one of *at* or *delay* must be specified. @@ -610,7 +706,7 @@ def newActor(cls, workerCallback:Callable, finished: Callable that is executed after the worker finished. It will receive the same arguments as the *workerCallback* callback. ignoreException: Restart the actor in case an exception is encountered. - data: Any data structure that is stored in the worker and accessible by the *data* attribute, and which is passed as the first argument in the *_data* argument of the *workerCallback* if not *None*. + data: Any data structure that is stored in the worker and accessible by the *data* attribute, and which is passed in the *_data* argument of the *workerCallback* if not *None*. Return: `BackgroundWorker` object. It is only an initialized object and needs to be started manually with its `start()` method. """ diff --git a/acme/helpers/EventManager.py b/acme/helpers/EventManager.py index ffce3cb9..af297361 100644 --- a/acme/helpers/EventManager.py +++ b/acme/helpers/EventManager.py @@ -44,11 +44,6 @@ class Event(list): # type:ignore[type-arg] Attention: Since the parent class is a *list* calling *isInstance(obj, list)* will return True. - - Attributes: - runInBackground: Indicator whether an event should be handled in a separate thread. - manager: The responsible `EventManager` to handle an event. - name: The event name. """ __slots__ = ( @@ -56,6 +51,7 @@ class Event(list): # type:ignore[type-arg] 'manager', 'name', ) + """ Slots of the Event class. 
""" def __init__(self, runInBackground:Optional[bool] = True, manager:Optional[EventManager] = None, @@ -68,8 +64,11 @@ def __init__(self, runInBackground:Optional[bool] = True, name: The event name. """ self.runInBackground = runInBackground + """ Indicator whether an event should be handled in a separate thread. """ self.manager = manager + """ The responsible `EventManager` to handle an event. """ self.name = name + """ The event name. """ def __call__(self, *args:Any, **kwargs:Any) -> None: @@ -131,20 +130,20 @@ class EventManager(object): manager.anEvent(anArg) Raise the *anEvent* `Event` with an *anArg* argument. - - Attributes: - _running: Internal Running indicator for the manager instance. """ __slots__ = ( '_running', ) + """ Slots of the EventManager class. """ def __init__(self) -> None: """ EventManager initialization. """ self._running = True + """ Internal Running indicator for the manager instance. """ + def shutdown(self) -> bool: """ Shutdown the Event Manager. diff --git a/acme/helpers/Interpreter.py b/acme/helpers/Interpreter.py index 7474abd7..c47eeda1 100644 --- a/acme/helpers/Interpreter.py +++ b/acme/helpers/Interpreter.py @@ -7,6 +7,9 @@ # Implementation of a simple s-expression-based command processor. # """ The interpreter module implements an extensible lisp-based scripting runtime. + + See: + `PContext` for the main class to run a script. """ from __future__ import annotations @@ -176,11 +179,13 @@ def unquote(self) -> SType: Return: The unquotde version of a quoted type. If the type is not a quoted type then return the same type. """ - if self == SType.tListQuote: - return SType.tList - elif self == SType.tSymbolQuote: - return SType.tSymbol - return self + match self: + case SType.tListQuote: + return SType.tList + case SType.tSymbolQuote: + return SType.tSymbol + case _: + return self class SSymbol(object): @@ -237,20 +242,19 @@ def __init__(self, string:str = None, # Try to determine an unknown type if value: - if isinstance(value, bool): - boolean = value - elif isinstance(value, str): - string = value - elif isinstance(value, (int, float)): - number = Decimal(value) - # elif isinstance(value, list): - # lstQuote = value - elif isinstance(value, dict): - jsn = value - elif isinstance(value, list): - lstQuote = [ SSymbol(value = _v) for _v in value ] - else: - raise ValueError(f'Unsupported type: {type(value)} for value: {value}') + match value: + case bool(): + boolean = value + case str(): + string = value + case int() | float(): + number = Decimal(value) + case dict(): + jsn = value + case list(): + lstQuote = [ SSymbol(value = _v) for _v in value ] + case _: + raise ValueError(f'Unsupported type: {type(value)} for value: {value}') # Assign known types if string is not None: # could be empty string @@ -299,7 +303,7 @@ def __init__(self, string:str = None, self.length = 1 else: self.type = SType.tNIL - self.value = False + self.value = None # was: False self.length = 0 @@ -358,25 +362,27 @@ def __contains__(self, obj:Any) -> bool: def toString(self, quoteStrings:bool = False, pythonList:bool = False) -> str: - if self.type in [ SType.tList, SType.tListQuote ]: - # Set the list chars - lchar1 = '[' if pythonList else '(' - lchar2 = ']' if pythonList else ')' - return f'{lchar1} {" ".join(lchar1 if v == "[" else lchar2 if v == "]" else v.toString(quoteStrings = quoteStrings, pythonList = pythonList) for v in cast(list, self.value))} {lchar2}' - # return f'( {" ".join(str(v) for v in cast(list, self.value))} )' - elif self.type == SType.tLambda: - return f'( 
( {", ".join(v.toString(quoteStrings = quoteStrings, pythonList = pythonList) for v in cast(tuple, self.value)[0])} ) {str(cast(tuple, self.value)[1])} )' - elif self.type == SType.tBool: - return str(self.value).lower() - elif self.type == SType.tString: - if quoteStrings: - return f'"{str(self.value)}"' - return str(self.value) - elif self.type == SType.tJson: - return json.dumps(self.value) - elif self.type == SType.tNIL: - return 'nil' - return str(self.value) + match self.type: + case SType.tList | SType.tListQuote: + # Set the list chars + lchar1 = '[' if pythonList else '(' + lchar2 = ']' if pythonList else ')' + return f'{lchar1} {" ".join(lchar1 if v == "[" else lchar2 if v == "]" else v.toString(quoteStrings = quoteStrings, pythonList = pythonList) for v in cast(list, self.value))} {lchar2}' + # return f'( {" ".join(str(v) for v in cast(list, self.value))} )' + case SType.tLambda: + return f'( ( {", ".join(v.toString(quoteStrings = quoteStrings, pythonList = pythonList) for v in cast(tuple, self.value)[0])} ) {str(cast(tuple, self.value)[1])} )' + case SType.tBool: + return str(self.value).lower() + case SType.tString: + if quoteStrings: + return f'"{str(self.value)}"' + return str(self.value) + case SType.tJson: + return json.dumps(self.value) + case SType.tNIL: + return 'nil' + case _: + return str(self.value) def append(self, arg:SSymbol) -> SSymbol: @@ -400,16 +406,18 @@ def raw(self) -> Any: Return: The raw value. For types that could not be converted directly the stringified version is returned. """ - if self.type in [ SType.tList, SType.tListQuote ]: - return [ v.raw() for v in cast(list, self.value) ] - elif self.type in [ SType.tBool, SType.tString, SType.tSymbol, SType.tSymbolQuote, SType.tJson ]: - return self.value - if self.type == SType.tNumber: - if '.' in str(self.value): # float or int? - return float(cast(Decimal, self.value)) - return int(cast(Decimal, self.value)) - return str(self.value) - + match self.type: + case SType.tList | SType.tListQuote: + return [ v.raw() for v in cast(list, self.value) ] + case SType.tBool | SType.tString | SType.tSymbol | SType.tSymbolQuote | SType.tJson: + return self.value + case SType.tNumber: + if '.' in str(self.value): # float or int? + return float(cast(Decimal, self.value)) + return int(cast(Decimal, self.value)) + case _: + return str(self.value) + class SExprParser(object): """ Class that implements an S-Expression parser. """ @@ -552,45 +560,51 @@ def ast(self, input:List[SSymbol]|str, index += 1 continue - if symbol.type == SType.tListBegin: # Start of another list - startIndex = index + 1 - matchCtr = 1 # If 0, parenthesis has been matched. - # Determine the matching closing paranthesis on the same level - while matchCtr != 0: - index += 1 - if index >= len(input): - self.errorExpression = input # type:ignore[assignment] - raise ValueError(f'Invalid input: Unmatched opening parenthesis: {input}') - symbol = input[index] - if symbol.type == SType.tListBegin: - matchCtr += 1 - elif symbol.type == SType.tListEnd: - matchCtr -= 1 - - if isQuote: # escaped with ' -> plain list - ast.append(SSymbol(lstQuote = self.ast(input[startIndex:index], False, allowBrackets))) - else: # normal list - ast.append(SSymbol(lst = self.ast(input[startIndex:index], False, allowBrackets))) - elif symbol.type == SType.tListEnd: + match symbol.type: + case SType.tListBegin: + startIndex = index + 1 + matchCtr = 1 # If 0, parenthesis has been matched. 
+ # Determine the matching closing paranthesis on the same level + while matchCtr != 0: + index += 1 + if index >= len(input): + self.errorExpression = input # type:ignore[assignment] + raise ValueError(f'Invalid input: Unmatched opening parenthesis: {input}') + symbol = input[index] + + match symbol.type: + case SType.tListBegin: + matchCtr += 1 + case SType.tListEnd: + matchCtr -= 1 + # ignore other types + + if isQuote: # escaped with ' -> plain list + ast.append(SSymbol(lstQuote = self.ast(input[startIndex:index], False, allowBrackets))) + else: # normal list + ast.append(SSymbol(lst = self.ast(input[startIndex:index], False, allowBrackets))) + + case SType.tListEnd: self.errorExpression = input # type:ignore[assignment] raise ValueError('Invalid input: Unmatched closing parenthesis.') - elif symbol.type == SType.tJson: - ast.append(symbol) - elif symbol.type == SType.tString: - ast.append(symbol) - else: - try: - ast.append(SSymbol(number = Decimal(symbol.value))) # type:ignore [arg-type] - except InvalidOperation: - if symbol.type == SType.tSymbol and symbol.value in [ 'true', 'false' ]: - ast.append(SSymbol(boolean = (symbol.value == 'true'))) - elif symbol.type == SType.tSymbol and symbol.value == 'nil': - ast.append(SSymbol()) - else: - if (_s := cast(str, symbol.value)).startswith('\''): - ast.append(SSymbol(symbolQuote = _s)) - else: - ast.append(symbol) + + case SType.tJson | SType.tString: + ast.append(symbol) + + case _: + try: + ast.append(SSymbol(number = Decimal(symbol.value))) # type:ignore [arg-type] + except InvalidOperation: + match symbol.type: + case SType.tSymbol if symbol.value in [ 'true', 'false' ]: + ast.append(SSymbol(boolean = (symbol.value == 'true'))) + case SType.tSymbol if symbol.value == 'nil': + ast.append(SSymbol()) + case _: + if (_s := cast(str, symbol.value)).startswith('\''): + ast.append(SSymbol(symbolQuote = _s)) + else: + ast.append(symbol) index += 1 isQuote = False @@ -684,38 +698,24 @@ class PCall(): Attributes: name: Function name. arguments: Dictionary of arguments (name -> `SSymbol`) for a call. + variables: Dictionary of variables (name -> `SSymbol`) for a call. """ name:str = None arguments:dict[str, SSymbol] = field(default_factory = dict) + variables:dict[str,SSymbol] = field(default_factory = dict) + class PContext(): - """ Process context for a single script. Can be re-used. + """ Process context for a single script. + + This is the main runtime object for the interpreter. + To run a script, create a `PContext` object, and call its `run()` method. - Attributes: - argv: List of string that are arguments to the script. - ast: The script' abstract syntax tree. - environment: Dictionary of variables that are passed by the application to the script. Similar to `variables`, but the environment is not cleared. - error: Error state. - errorFunc: An optional function that is called when an error occured. - evaluateInline: Check and execute inline expressions in strings. - functions: Dictoonary of defined script functions. - logErrorFunc: An optional function that receives error log messages. - logFunc: An optional function that receives non-error log messages. - matchFunc: An optional function that is used to run regex comparisons. - maxRuntime: Number of seconds that is a script allowed to run. - meta: Dictionary of the script's meta tags and their arguments. - postFunc: An optional function that is called after running a script. - preFunc: An optional function that is called before running a script. 
- printFunc: An optional function for printing messages to the screen, console, etc. - result: Intermediate and final results during the execution. - script: The script to run. - state: The internal state of a script. - symbols: A dictionary of new symbols / functions to add to the interpreter. - variables: Dictionary of variables. - _maxRTimestamp: The max timestamp until the script may run (internal). - _callStack: The internal call stack (internal). - _symbolds: Dictionary with all build-in and provided functions (internal). + To add new symbols to the interpreter, inherit from `PContext` + and add them to the `symbols` dictionary during initialization. + + A `PContext` object can be re-used. """ __slots__ = ( @@ -738,7 +738,6 @@ class PContext(): 'state', 'error', 'meta', - 'variables', 'functions', 'environment', 'argv', @@ -746,7 +745,8 @@ class PContext(): 'verbose', '_maxRTimestamp', '_callStack', - '_symbolds', + '_symbols', + '_variables', ) """ Slots of class attributes. """ @@ -755,24 +755,24 @@ class PContext(): def __init__(self, script:str, - symbols:PSymbolDict = None, - logFunc:PLogCallable = lambda pcontext, msg: print(f'** {msg}'), - logErrorFunc:PErrorLogCallable = lambda pcontext, msg, exception: print(f'!! {msg}'), - printFunc:PLogCallable = lambda pcontext, msg: print(msg), - preFunc:PFuncCallable = None, - postFunc:PFuncCallable = None, - errorFunc:PFuncCallable = None, - matchFunc:PMatchCallable = lambda pcontext, l, r: l == r, - maxRuntime:float = None, - fallbackFunc:PSymbolCallable = None, - monitorFunc:PSymbolCallable = None, - allowBrackets:bool = False, - verbose:bool = False) -> None: + symbols:Optional[PSymbolDict] = None, + logFunc:Optional[PLogCallable] = lambda pcontext, msg: print(f'** {msg}'), + logErrorFunc:Optional[PErrorLogCallable] = lambda pcontext, msg, exception: print(f'!! {msg}'), + printFunc:Optional[PLogCallable] = lambda pcontext, msg: print(msg), + preFunc:Optional[PFuncCallable] = None, + postFunc:Optional[PFuncCallable] = None, + errorFunc:Optional[PFuncCallable] = None, + matchFunc:Optional[PMatchCallable] = lambda pcontext, l, r: l == r, + maxRuntime:Optional[float] = None, + fallbackFunc:Optional[PSymbolCallable] = None, + monitorFunc:Optional[PSymbolCallable] = None, + allowBrackets:Optional[bool] = False, + verbose:Optional[bool] = False) -> None: """ Initialization of a `PContext` object. Args: script: The script to run. - symbols: A dictionary of new symbols / functions to add to the interpreter. + symbols: An optional dictionary of new symbols / functions to add to the interpreter. logFunc: An optional function that receives non-error log messages. logErrorFunc: An optional function that receives error log messages. printFunc: An optional function for printing messages to the screen, console, etc. @@ -789,36 +789,66 @@ def __init__(self, # Extra parameters that can be provided self.script = script + """ The script to run. """ self.symbols = _builtinCommands + """ A dictionary of new symbols / functions to add to the interpreter. """ self.logFunc = logFunc + """ An optional function that receives non-error log messages. """ self.logErrorFunc = logErrorFunc + """ An optional function that receives error log messages. """ self.printFunc = printFunc + """ An optional function for printing messages to the screen, console, etc. """ self.preFunc = preFunc + """ An optional function that is called before running a script. """ self.postFunc = postFunc + """ An optional function that is called after running a script. 
""" self.errorFunc = errorFunc + """ An optional function that is called when an error occured. """ self.matchFunc = matchFunc + """ An optional function that is used to run regex comparisons. """ self.maxRuntime = maxRuntime + """ Number of seconds that is a script allowed to run. """ self.fallbackFunc = fallbackFunc + """ An optional function to retrieve unknown symbols from the caller. """ self.monitorFunc = monitorFunc + """ An optional function to monitor function calls, e.g. to forbid them during particular executions. """ self.allowBrackets = allowBrackets + """ Allow "[" and "]" for opening and closing lists as well. """ # State, result and error attributes self.ast:list[SSymbol] = None + """ The script's abstract syntax tree.""" self.result:SSymbol = None + """ Intermediate and final results during the execution. """ self.verbose:bool = verbose + """ Print more debug messages. """ self.state:PState = PState.created + """ The internal state of a script.""" self.error:PErrorState = PErrorState(PError.noError, 0, '', None ) + """ Error state. """ self.meta:Dict[str, str] = {} - self.variables:Dict[str,SSymbol] = {} + """ Dictionary of the script's meta tags and their arguments. """ self.functions:dict[str, FunctionDefinition] = {} - self.environment:Dict[str,SSymbol] = {} # Similar to variables, but not cleared + """ Dictoonary of defined script functions. """ + self.environment:Dict[str, SSymbol] = {} # Similar to variables, but not cleared + """ Dictionary of variables that are passed by the application to the script. Similar to `variables`, but the environment is not cleared. """ self.argv:list[str] = [] + """ List of string that are arguments to the script. """ self.evaluateInline = True # check and execute inline expressions + """ Check and execute inline expressions in strings. """ # Internal attributes that should not be accessed from extern self._maxRTimestamp:float = None - self._callStack:list[PCall] = [PCall()] - self._symbolds:PSymbolDict = None # builtins + provided commands + """ The max timestamp until the script may run (internal). """ + self._callStack:list[PCall] = [] + """ The internal call stack (internal). """ + self._symbols:PSymbolDict = None # builtins + provided commands + """ Dictionary with all build-in and provided functions (internal). """ + # self._variables:Dict[str, SSymbol] = {} + + + # Add one to the callstack to add variables + self.pushCall() # Add new commands if symbols: @@ -863,7 +893,6 @@ def reset(self) -> None: This method may also be implemented in a subclass, but that subclass must then call this method as well. """ self.error = PErrorState(PError.noError, 0, '', None) - self.variables.clear() self._callStack.clear() self.pushCall(name = self.meta.get('name')) self.state = PState.ready @@ -892,6 +921,16 @@ def setError(self, error:PError, self.state = state self.error = PErrorState(error, msg, expression, exception) return self + + + def clearError(self, state:Optional[PState] = PState.running) -> None: + """ Clear the error status. + + Args: + state: `PState` to indicate the state of the script. Default is "running". + """ + self.state = state + self.error = PErrorState(PError.noError, 0, '', None) def copyError(self, pcontext:PContext) -> None: @@ -914,7 +953,6 @@ def setResult(self, symbol:SSymbol) -> PContext: Return: Self. 
- """ self.result = symbol return self @@ -941,6 +979,8 @@ def pushCall(self, name:Optional[str] = None) -> None: raise PRuntimeError(self.setError(PError.maxRecursionDepth, f'Max level of function calls exceeded')) call = PCall() call.name = name + if len(self._callStack): + call.variables = deepcopy(self._callStack[-1].variables) # copy variables from the previous scope self._callStack.append(call) @@ -1012,10 +1052,35 @@ def getVariables(self, expression:str) -> list[Tuple[str, SSymbol]]: if re.match(_expr, k) ] return [ ( k, self.variables[k] ) for k in _keys ] + + @property + def variables(self) -> dict[str, SSymbol]: + """ The variables of the current scope. + + Returns: + The variables of the current scope. + """ + return self._callStack[-1].variables + + + def setVariable(self, key:str, value:SSymbol) -> None: + """ Set a variable for a name. If the variable exists in the global scope, it is updated or set in all scopes. + Otherwise, it is only updated or set in the current scope. + + Args: + key: Variable name + value: Value to store + """ + if key in self._callStack[0].variables: + for eachCall in self._callStack: + eachCall.variables[key] = value + else: + self._callStack[-1].variables[key] = value def delVariable(self, key:str) -> Optional[SSymbol]: - """ Delete a variable for a case insensitive name. + """ Delete a variable for a name. If the variable exists in the global scope, it is deleted in all scopes. + Otherwise, it is only deleted in the current scope. Args: key: Variable name @@ -1023,12 +1088,17 @@ def delVariable(self, key:str) -> Optional[SSymbol]: Return: Variable content, or None if variable is not defined. """ - key = key.lower() - if key in self.variables: - v = self.variables[key] - del self.variables[key] + try: + if key in self._callStack[0].variables: + v = self._callStack[-1].variables[key] # return latest value afterwards + for eachCall in self._callStack: + del eachCall.variables[key] + else: + v = self._callStack[-1].variables.get(key) + del self._callStack[-1].variables[key] return v - return None + except KeyError: + return None def getEnvironmentVariable(self, key:str) -> SSymbol: @@ -1040,17 +1110,17 @@ def getEnvironmentVariable(self, key:str) -> SSymbol: Return: Environment variable content, or None. """ - return self.environment.get(key.lower()) + return self.environment.get(key) def setEnvironmentVariable(self, key:str, value:SSymbol) -> None: - """ Set an environment variable for a case insensitive name. + """ Set an environment variable for a name. Args: key: Environment variable name value: Value to store """ - self.environment[key.lower()] = value + self.environment[key] = value def clearEnvironment(self) -> None: @@ -1095,6 +1165,17 @@ def scriptName(self, name:str) -> None: name: Name of the script. """ self.meta['name'] = name + + + def setMaxRuntime(self, maxRuntime:float) -> None: + """ Set the maximum runtime of the script. + + Args: + maxRuntime: Maximum runtime in seconds. + """ + if self.state == PState.running: + raise PUnsupportedError(self.setError(PError.runtime, f'Cannot set runtime while script is running')) + self.maxRuntime = maxRuntime def getMeta(self, key:str, default:Optional[str] = '') -> str: @@ -1123,9 +1204,10 @@ def hasMeta(self, key:str) -> bool: def getArgument(self, symbol:SSymbol, - idx:int = None, - expectedType:SType|Tuple[SType, ...] 
= None, - doEval:bool = True) -> PContext: + idx:Optional[int] = None, + expectedType:Optional[SType|Tuple[SType, ...]] = None, + doEval:Optional[bool] = True, + optional:Optional[bool] = False) -> PContext: """ Verify that an expression is a list and return an argument symbol, while optionally verify the allowed type(s) for that argument. @@ -1138,6 +1220,7 @@ def getArgument(self, symbol:SSymbol, idx: Optional index if the symbol contains a list of symbols. expectedType: one or multiple data types that are allowed for the retrieved argument symbol. doEval: Optionally recursively evaluate the symbol. + optional: Allow the argument to be None. Return: Result `PContext` object with the result, possible changed variable and other states. @@ -1159,6 +1242,9 @@ def getArgument(self, symbol:SSymbol, if expectedType is not None: if isinstance(expectedType, SType): expectedType = ( expectedType, ) + # add NIL if optional + if optional: + expectedType = expectedType + ( SType.tNIL, ) if pcontext.result is not None and pcontext.result.type not in expectedType: raise PInvalidArgumentError(self.setError(PError.invalid, f'expression: {symbol} - invalid type for argument: {_symbol}, expected type: {expectedType}, is: {pcontext.result.type}')) @@ -1168,9 +1254,10 @@ def getArgument(self, symbol:SSymbol, def valueFromArgument(self, symbol:SSymbol, - idx:int = None, - expectedType:SType|Tuple[SType, ...] = None, - doEval:bool = True) -> Tuple[PContext, Any]: + idx:Optional[int] = None, + expectedType:Optional[SType|Tuple[SType, ...]] = None, + doEval:Optional[bool] = True, + optional:Optional[bool] = False) -> Tuple[PContext, Any]: """ Return the actual value from an argument symbol. Args: @@ -1178,18 +1265,24 @@ def valueFromArgument(self, symbol:SSymbol, idx: Optional index if the symbol contains a list of symbols. expectedType: one or multiple data types that are allowed for the retrieved argument symbol. doEval: Optionally recursively evaluate the symbol. + optional: Allow the argument to be optional. Return: Result tuple of the updated `PContext` object with the result and the value. """ - p,r = self.resultFromArgument(symbol, idx, expectedType, doEval) - return (p, r.value) + if idx < symbol.length: + p, r = self.resultFromArgument(symbol, idx, expectedType, doEval, optional) + return (p, r.value) + elif optional: + return (self, None) + raise PInvalidArgumentError(self.setError(PError.invalid, f'expression: {symbol} - invalid argument index: {idx}')) def resultFromArgument(self, symbol:SSymbol, - idx:int = None, - expectedType:SType|Tuple[SType, ...] = None, - doEval:bool = True) -> Tuple[PContext, SSymbol]: + idx:Optional[int] = None, + expectedType:Optional[SType|Tuple[SType, ...]] = None, + doEval:Optional[bool] = True, + optional:Optional[bool] = False) -> Tuple[PContext, SSymbol]: """ Return the `SSymbol` result from an argument symbol. Args: @@ -1197,11 +1290,12 @@ def resultFromArgument(self, symbol:SSymbol, idx: Optional index if the symbol contains a list of symbols. expectedType: one or multiple data types that are allowed for the retrieved argument symbol. doEval: Optionally recursively evaluate the symbol. + optional: Allow the argument to be optional. Return: Result tuple of the updated `PContext` object with the result and the symbol. 
""" - return (p := self.getArgument(symbol, idx, expectedType, doEval), p.result) + return (p := self.getArgument(symbol, idx, expectedType, doEval, optional), p.result) def executeSubexpression(self, expression:str) -> PContext: @@ -1229,6 +1323,8 @@ def executeSubexpression(self, expression:str) -> PContext: raise PInvalidArgumentError(self) self.result = None self.run(arguments = self.argv, isSubCall = True) # might throw exception + if self.state in(PState.terminated, PState.terminatedWithResult): # Correct state for subcall + self.state = PState.running self.ast = _ast self.script = _script return self @@ -1332,7 +1428,7 @@ def _terminating(pcontext:PContext) -> None: # Start running self.state = PState.running - if self.maxRuntime is not None: # set max runtime + if self.maxRuntime: # > 0 or not None: set max runtime self._maxRTimestamp = _utcTimestamp() + self.maxRuntime if (scriptName := self.scriptName) and not isSubCall: if self.verbose: @@ -1376,6 +1472,7 @@ def _executeExpression(self, symbol:SSymbol, parentSymbol:SSymbol) -> PContext: Args: symbol: The symbol to execute. + parentSymbol: The parent symbol of the symbol to execute. Return: The updated `PContext` object with the result. @@ -1396,62 +1493,61 @@ def _executeExpression(self, symbol:SSymbol, parentSymbol:SSymbol) -> PContext: return self.setResult(SSymbol()) firstSymbol = symbol[0] if symbol.length and symbol.type == SType.tList else symbol - if firstSymbol.type == SType.tList: - if firstSymbol.length > 0: - # implicit progn - return _doProgn(self, SSymbol(lst = [ SSymbol(symbol = 'progn') ] + symbol.value )) #type:ignore[operator] - else: - self.result = SSymbol() - return self - - elif firstSymbol.type == SType.tListQuote: - return _doQuote(self, SSymbol(lst = [ SSymbol(symbol = 'quote'), SSymbol(lst = firstSymbol.value)])) - - elif firstSymbol.type == SType.tSymbol: - _s = cast(str, firstSymbol.value) - - # Just return already boolean values in the result here - if (_fn := self.functions.get(_s)) is not None: - return self._executeFunction(symbol, _s, _fn) - elif (_cb := self.symbols.get(_s)) is not None: # type:ignore[arg-type] - if self.monitorFunc: - self.monitorFunc(self, firstSymbol) - return _cb(self, symbol) - elif _s in self.call.arguments: - self.result = deepcopy(self.call.arguments[_s]) - return self - elif _s in self.variables: - self.result = deepcopy(self.variables[_s]) - return self - elif _s in self.environment: - self.result = deepcopy(self.environment[_s]) - return self - - # Try to get the symbol's value from the caller, if possible - else: - if self.fallbackFunc: - return self.fallbackFunc(self, symbol) - raise PUndefinedError(self.setError(PError.undefined, f'undefined symbol: {_s} | in symbol: {parentSymbol}')) - - elif firstSymbol.type == SType.tSymbolQuote: - return _doQuote(self, SSymbol(lst = [ SSymbol(symbol = 'quote'), SSymbol(symbol = firstSymbol.value)])) - - elif firstSymbol.type == SType.tLambda: - return self._executeFunction(symbol, cast(str, firstSymbol.value)) + match firstSymbol.type: + case SType.tString: + return self.checkInStringExpressions(firstSymbol) + + case SType.tNumber | SType.tBool | SType.tNIL: + return self.setResult(firstSymbol) # type:ignore [arg-type] + + case SType.tJson: + return self.checkInStringExpressions(symbol) - elif firstSymbol.type == SType.tString: - return self.checkInStringExpressions(firstSymbol) + case SType.tList: + if firstSymbol.length > 0: + # implicit progn + return _doProgn(self, SSymbol(lst = [ SSymbol(symbol = 'progn') ] + symbol.value 
)) #type:ignore[operator] + else: + self.result = SSymbol() + return self - elif firstSymbol.type == SType.tNumber: - return self.setResult(firstSymbol) # type:ignore [arg-type] + case SType.tListQuote: + return _doQuote(self, SSymbol(lst = [ SSymbol(symbol = 'quote'), SSymbol(lst = firstSymbol.value)])) + + case SType.tSymbol: + _s = cast(str, firstSymbol.value) + + # Execute function, if defined, or try to find the value in variables, environment, etc. + if (_fn := self.functions.get(_s)) is not None: + return self._executeFunction(symbol, _s, _fn) + elif (_cb := self.symbols.get(_s)) is not None: # type:ignore[arg-type] + if self.monitorFunc: + self.monitorFunc(self, firstSymbol) + return _cb(self, symbol) + elif _s in self.call.arguments: + self.result = deepcopy(self.call.arguments[_s]) + return self + elif _s in self.variables: + self.result = deepcopy(self.variables[_s]) + return self + elif _s in self.environment: + self.result = deepcopy(self.environment[_s]) + return self + + # Try to get the symbol's value from the caller as a last resort + else: + if self.fallbackFunc: + return self.fallbackFunc(self, symbol) + raise PUndefinedError(self.setError(PError.undefined, f'undefined symbol: {_s} | in symbol: {parentSymbol}')) - elif firstSymbol.type == SType.tBool: - return self.setResult(firstSymbol) + case SType.tSymbolQuote: + return _doQuote(self, SSymbol(lst = [ SSymbol(symbol = 'quote'), SSymbol(symbol = firstSymbol.value)])) - elif firstSymbol.type == SType.tJson: - return self.checkInStringExpressions(symbol) - - raise PInvalidArgumentError(self.setError(PError.invalid, f'Unexpected symbol: {firstSymbol.type} - {firstSymbol}')) + case SType.tLambda: + return self._executeFunction(symbol, cast(str, firstSymbol.value)) + + case _: + raise PInvalidArgumentError(self.setError(PError.invalid, f'Unexpected symbol: {firstSymbol.type} - {firstSymbol}')) def checkInStringExpressions(self, symbol:SSymbol) -> PContext: @@ -1569,7 +1665,7 @@ def _joinExpression(self, symbols:list[SSymbol], sep:str = ' ') -> PContext: PSymbolCallable = Callable[[PContext, SSymbol], PContext] """ Signature of a symbol callable. The callbacks are called with a `PContext` object and is supposed to return - it again, or None in case of an error. + it again, updated with a return value, or *None* in case of an error. 
""" PLogCallable = Callable[[PContext, str], None] @@ -1860,12 +1956,14 @@ def _doCons(pcontext:PContext, symbol:SSymbol) -> PContext: # get second symbol pcontext, _second = pcontext.valueFromArgument(symbol, 2) - if _second.type in [SType.tList, SType.tListQuote]: - pcontext.result = deepcopy(_second) - elif _second.type == SType.tNIL: - pcontext.result = SSymbol(lst = []) - else: - pcontext.result = SSymbol(lst = [ deepcopy(_second) ]) + match _second.type: + case SType.tList | SType.tListQuote: + pcontext.result = deepcopy(_second) + case SType.tNIL: + pcontext.result = SSymbol(lst = []) + case _: + pcontext.result = SSymbol(lst = [ deepcopy(_second) ]) + cast(list, pcontext.result.value).insert(0, deepcopy(_first)) return pcontext @@ -1892,8 +1990,11 @@ def _doDatetime(pcontext:PContext, symbol:SSymbol) -> PContext: """ pcontext.assertSymbol(symbol, maxLength = 2) _format = '%Y%m%dT%H%M%S.%f' - if symbol.length == 2: - pcontext, _format = pcontext.valueFromArgument(symbol, 1, SType.tString) + + # get format + pcontext, format = pcontext.valueFromArgument(symbol, 1, SType.tString, optional = True) + if format is None: + format = _format return pcontext.setResult(SSymbol(string = _utcNow().strftime(_format))) @@ -1946,6 +2047,131 @@ def _doDefun(pcontext:PContext, symbol:SSymbol) -> PContext: return pcontext +def _doDolist(pcontext:PContext, symbol:SSymbol) -> PContext: + pcontext.assertSymbol(symbol, 3) + + # arguments + pcontext, _arguments = pcontext.valueFromArgument(symbol, 1, SType.tList, doEval = False) # don't evaluate the argument + if 2 <= len(_arguments) <= 3: + # get loop variable + _loopvar = cast(SSymbol, _arguments[0]) + if _loopvar.type != SType.tSymbol: + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'dolist "var" must be a symbol, got: {pcontext.result.type}')) + + # get list to loop over + pcontext = pcontext._executeExpression(_arguments[1], _arguments) + if pcontext.result.type not in (SType.tList, SType.tListQuote): + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'dolist "list" must be a (quoted) list, got: {pcontext.result.type}')) + _looplist = pcontext.result + else: + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'dolist first argument requires 2 or 3 arguments, got: {len(_arguments)}')) + + # Get result variable name + if len(_arguments) == 3: + _resultvar = cast(SSymbol, _arguments[2]) + if _resultvar.type != SType.tSymbol: + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'dolist "result" must be a symbol, got: {pcontext.result.type}')) + + # if the variable does not exist, create it as a nil symbol + if not str(_resultvar) in pcontext.variables: + pcontext.setVariable(str(_resultvar), SSymbol()) + else: + _resultvar = None + + # code + pcontext, _code = pcontext.valueFromArgument(symbol, 2, SType.tList, doEval = False) # don't evaluate the argument (yet) + _code = SSymbol(lst = _code) # We got a python list, but need a SSymbol list + + # execute the code + pcontext.setVariable(str(_loopvar), SSymbol(number = Decimal(0))) + for i in _looplist.value: # type:ignore[union-attr] + pcontext.setVariable(str(_loopvar), i) # type:ignore[arg-type] + pcontext = pcontext._executeExpression(_code, symbol) + + # set the result + if _resultvar: + pcontext.result = pcontext.variables[str(_resultvar)] + else: + pcontext.result = SSymbol() + + # return + return pcontext + + + +def _doDotimes(pcontext:PContext, symbol:SSymbol) -> PContext: + """ This function executes a code block a number of times. 
+ + The first argument is a list that contains the loop counter symbol and the + loop limit. An optional third argument is the result variable for the loop. + The second argument is the code block to execute. + + Example: + :: + + (dotimes (i 10) (print i)) + (dotimes (i 10 result) (setq result i)) + + Args: + pcontext: Current `PContext` for the script. + symbol: The symbol to execute. + + Return: + The updated `PContext` object. The result + + """ + pcontext.assertSymbol(symbol, 3) + + # arguments + pcontext, _arguments = pcontext.valueFromArgument(symbol, 1, SType.tList, doEval = False) # don't evaluate the argument + if 2 <= len(_arguments) <= 3: + # get loop variable + _loopvar = cast(SSymbol, _arguments[0]) + if _loopvar.type != SType.tSymbol: + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'dotimes "counter" must be a symbol, got: {pcontext.result.type}')) + + # get loop count + pcontext = pcontext._executeExpression(_arguments[1], _arguments) + if pcontext.result.type != SType.tNumber: + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'dotimes "count" must be a number, got: {pcontext.result.type}')) + _loopcount = pcontext.result + if int(_loopcount.value) < 0: # type:ignore[arg-type] + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'dotimes "count" must be a non-negative number, got: {_loopcount.value}')) + else: + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'dotimes first argument requires 2 or 3 arguments, got: {len(_arguments)}')) + + # Get result variable name + if len(_arguments) == 3: + _resultvar = cast(SSymbol, _arguments[2]) + if _resultvar.type != SType.tSymbol: + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'dotimes "result" must be a symbol, got: {pcontext.result.type}')) + + # if the variable does not exist, create it as a nil symbol + if not str(_resultvar) in pcontext.variables: + pcontext.setVariable(str(_resultvar), SSymbol()) + else: + _resultvar = None + + # code + pcontext, _code = pcontext.valueFromArgument(symbol, 2, SType.tList, doEval = False) # don't evaluate the argument (yet) + _code = SSymbol(lst = _code) # We got a python list, but must have a SSymbol list + + # execute the code + pcontext.setVariable(str(_loopvar), SSymbol(number = Decimal(0))) + for i in range(0, int(cast(Decimal, _loopcount.value))): + pcontext.setVariable(str(_loopvar), SSymbol(number = Decimal(i))) + pcontext = pcontext._executeExpression(_code, symbol) + + # set the result + if _resultvar: + pcontext.result = pcontext.variables[str(_resultvar)] + else: + pcontext.result = SSymbol() + + # return + return pcontext + + def _doError(pcontext:PContext, symbol:SSymbol) -> PContext: """ End script execution with an error. The optional argument will be assigned as the result of the script (pcontext.result). 
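The new dolist and dotimes commands above follow the usual Lisp convention: a loop specification (var count-or-list [result]), a body that is evaluated once per iteration, and an optional result variable that is created as nil when it does not yet exist and whose final value becomes the command's result. The short Python sketch below only models that convention for readers unfamiliar with it; the names run_dotimes, run_dolist and the plain-dict scope are invented for the illustration and are not the interpreter's actual API, which operates on SSymbol values inside a PContext scope.

# Minimal model of the dolist/dotimes result-variable convention (names invented for this sketch).
from typing import Any, Callable, Optional

def run_dotimes(scope: dict[str, Any], counter: str, count: int,
                body: Callable[[dict[str, Any]], None],
                result_var: Optional[str] = None) -> Any:
    if result_var is not None and result_var not in scope:
        scope[result_var] = None                  # result variable starts out as "nil"
    for i in range(count):
        scope[counter] = i                        # the loop variable is rebound on every iteration
        body(scope)                               # the body runs for its side effects
    return scope[result_var] if result_var else None

def run_dolist(scope: dict[str, Any], var: str, items: list[Any],
               body: Callable[[dict[str, Any]], None],
               result_var: Optional[str] = None) -> Any:
    if result_var is not None and result_var not in scope:
        scope[result_var] = None
    for item in items:
        scope[var] = item
        body(scope)
    return scope[result_var] if result_var else None

# Roughly corresponds to: (dotimes (i 10 result) (setq result i))
scope: dict[str, Any] = {}
print(run_dotimes(scope, 'i', 10, lambda s: s.update(result=s['i']), 'result'))   # prints 9

As in Common Lisp, omitting the result variable simply makes the loop evaluate to nil.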
@@ -2038,17 +2264,19 @@ def _doGetJSONAttribute(pcontext:PContext, symbol:SSymbol) -> PContext: """ def _toSymbol(value:Any) -> SSymbol: - if isinstance(value, str): - return SSymbol(string = value) - elif isinstance(value, (int, float)): - return SSymbol(number = Decimal(value)) - elif isinstance(value, dict): - return SSymbol(jsn = value) - elif isinstance(value, bool): - return SSymbol(boolean = value) - elif isinstance(value, list): - return SSymbol(lst = [ _toSymbol(l) for l in value]) - return SSymbol() # nil + match value: + case str(): + return SSymbol(string = value) + case int() | float(): + return SSymbol(number = Decimal(value)) + case dict(): + return SSymbol(jsn = value) + case bool(): + return SSymbol(boolean = value) + case list(): + return SSymbol(lst = [ _toSymbol(l) for l in value]) + case _: + return SSymbol() # nil pcontext.assertSymbol(symbol, 3) @@ -2111,7 +2339,10 @@ def _doIf(pcontext:PContext, symbol:SSymbol) -> PContext: """ pcontext.assertSymbol(symbol, minLength = 3) - pcontext, _e = pcontext.valueFromArgument(symbol, 1, SType.tBool) + pcontext, _e = pcontext.valueFromArgument(symbol, 1, (SType.tBool, SType.tNIL, SType.tList, SType.tListQuote, SType.tString)) + if isinstance(_e, (list, str)): + _e = len(_e) > 0 + if _e: _p = pcontext._executeExpression(symbol[2], symbol) elif symbol.length == 4: @@ -2145,7 +2376,6 @@ def _doIn(pcontext:PContext, symbol:SSymbol) -> PContext: # Get symbol (!) to check pcontext, _s = pcontext.resultFromArgument(symbol, 2, (SType.tString, SType.tList, SType.tListQuote)) - # check return pcontext.setResult(SSymbol(boolean = _v in _s)) @@ -2193,7 +2423,7 @@ def _doIncDec(pcontext:PContext, symbol:SSymbol, isInc:Optional[bool] = True) -> # Increment / decrement and Re-assign variable value.value = (cast(Decimal, value.value) + idValue) if isInc else (cast(Decimal, value.value) - idValue) - pcontext.variables[variable.value] = value + pcontext.setVariable(variable.value, value) return pcontext.setResult(deepcopy(value)) @@ -2405,7 +2635,7 @@ def _doLet(pcontext:PContext, symbol:SSymbol, sequential:bool = True) -> PContex # get value and assign variable (symbol!)
pcontext, result = pcontext.resultFromArgument(cast(SSymbol, symbol.value), 1) - pcontext.variables[variablename] = result + pcontext.setVariable(variablename, result) return pcontext @@ -2626,7 +2856,7 @@ def _doOperation(pcontext:PContext, symbol:SSymbol, op:Callable, tp:SType) -> PC """ pcontext.assertSymbol(symbol, minLength = 2) - r1 = pcontext._executeExpression(symbol[1], symbol).result + r1 = deepcopy(pcontext._executeExpression(symbol[1], symbol).result) for i in range(2, symbol.length): try: @@ -2950,7 +3180,7 @@ def _doSetq(pcontext:PContext, symbol:SSymbol) -> PContext: # value pcontext, _value = pcontext.resultFromArgument(symbol, 2) - pcontext.variables[_var] = _value + pcontext.setVariable(_var, _value) return pcontext @@ -3240,7 +3470,9 @@ def _doWhile(pcontext:PContext, symbol:SSymbol) -> PContext: while True: # evaluate while expression - pcontext, _e = pcontext.valueFromArgument(symbol, 1, SType.tBool) + pcontext, _e = pcontext.valueFromArgument(symbol, 1, (SType.tBool, SType.tNIL, SType.tList, SType.tListQuote, SType.tString)) + if isinstance(_e, (list, str)): + _e = len(_e) > 0 if not _e: break @@ -3288,6 +3520,8 @@ def _doWhile(pcontext:PContext, symbol:SSymbol) -> PContext: 'datetime': _doDatetime, 'dec': lambda p, a: _doIncDec(p, a, False), 'defun': _doDefun, + 'dolist': _doDolist, + 'dotimes': _doDotimes, 'eval': _doEval, 'evaluate-inline': _doEvaluateInline, 'false': lambda p, a: _doBoolean(p, a, False), diff --git a/acme/helpers/KeyHandler.py b/acme/helpers/KeyHandler.py index b51fd54b..ae5bdaf8 100644 --- a/acme/helpers/KeyHandler.py +++ b/acme/helpers/KeyHandler.py @@ -16,7 +16,7 @@ from enum import Enum _timeout = 0.5 - +""" Timeout for getch() in seconds. """ try: # Posix, Linux, Mac OS @@ -27,123 +27,226 @@ class FunctionKey(str, Enum): # Common LF = '\x0a' + """ Line feed. """ CR = '\x0d' + """ Carriage return. """ SPACE = '\x20' + """ Space. """ # ESC = '\x1b' BACKSPACE = '\x7f' + """ Backspace. """ TAB = '\x09' + """ Tab. """ SHIFT_TAB = '\x1b\x5b\x5a' + """ Shift tab. """ # CTRL-Keys CTRL_A = '\x01' + """ Ctrl-A. """ CTRL_B = '\x02' + """ Ctrl-B. """ CTRL_C = '\x03' + """ Ctrl-C. """ CTRL_D = '\x04' + """ Ctrl-D. """ CTRL_E = '\x05' + """ Ctrl-E. """ CTRL_F = '\x06' + """ Ctrl-F. """ CTRL_G = '\x07' + """ Ctrl-G. """ CTRL_H = '\x08' + """ Ctrl-H. """ CTRL_I = TAB + """ Ctrl-I. Mappped to TAB. """ CTRL_J = LF + """ Ctrl-J. Mapped to Line Feed. """ CTRL_K = '\x0b' + """ Ctrl-K. """ CTRL_L = '\x0c' + """ Ctrl-L. """ CTRL_M = CR + """ Ctrl-M. Mapped to Carriage Return. """ CTRL_N = '\x0e' + """ Ctrl-N. """ CTRL_O = '\x0f' + """ Ctrl-O. """ CTRL_P = '\x10' + """ Ctrl-P. """ CTRL_Q = '\x11' + """ Ctrl-Q. """ CTRL_R = '\x12' + """ Ctrl-R. """ CTRL_S = '\x13' + """ Ctrl-S. """ CTRL_T = '\x14' + """ Ctrl-T. """ CTRL_U = '\x15' + """ Ctrl-U. """ CTRL_V = '\x16' + """ Ctrl-V. """ CTRL_W = '\x17' + """ Ctrl-W. """ CTRL_X = '\x18' + """ Ctrl-X. """ CTRL_Y = '\x19' + """ Ctrl-Y. """ CTRL_Z = '\x1a' + """ Ctrl-Z. """ # Cursor keys UP = '\x1b\x5b\x41' + """ Cursor up. """ DOWN = '\x1b\x5b\x42' + """ Cursor down. """ LEFT = '\x1b\x5b\x44' + """ Cursor left. """ RIGHT = '\x1b\x5b\x43' + """ Cursor right. """ SHIFT_UP = '\x1b\x5b\x31\x3b\x32\x41' + """ Shift cursor up. """ SHIFT_DOWN = '\x1b\x5b\x31\x3b\x32\x42' + """ Shift cursor down. """ SHIFT_RIGHT = '\x1b\x5b\x31\x3b\x32\x43' + """ Shift cursor right. """ SHIFT_LEFT = '\x1b\x5b\x31\x3b\x32\x44' + """ Shift cursor left. """ CTRL_UP = '\x1b\x5b\x31\x3b\x35\x41' + """ Ctrl cursor up. 
""" CTRL_DOWN = '\x1b\x5b\x31\x3b\x35\x42' + """ Ctrl cursor down. """ CTRL_RIGHT = '\x1b\x5b\x31\x3b\x35\x43' + """ Ctrl cursor right. """ CTRL_LEFT = '\x1b\x5b\x31\x3b\x35\x44' + """ Ctrl cursor left. """ ALT_UP = '\x1b\x1b\x5b\x41' + """ Alt cursor up. """ ALT_DOWN = '\x1b\x1b\x5b\x42' + """ Alt cursor down. """ ALT_RIGHT = '\x1b\x1b\x5b\x43' + """ Alt cursor right. """ ALT_LEFT = '\x1b\x1b\x5b\x44' + """ Alt cursor left. """ SHIFT_ALT_UP = '\x1b\x5b\x31\x3b\x31\x30\x41' + """ Shift Alt cursor up. """ SHIFT_ALT_DOWN = '\x1b\x5b\x31\x3b\x31\x30\x42' + """ Shift Alt cursor down. """ SHIFT_ALT_RIGHT = '\x1b\x5b\x31\x3b\x31\x30\x43' + """ Shift Alt cursor right. """ SHIFT_ALT_LEFT = '\x1b\x5b\x31\x3b\x31\x30\x44' + """ Shift Alt cursor left. """ SHIFT_CTRL_UP = '\x1b\x5b\x31\x3b\x36\x41' + """ Shift Ctrl cursor up. """ SHIFT_CTRL_DOWN = '\x1b\x5b\x31\x3b\x36\x42' + """ Shift Ctrl cursor down. """ SHIFT_CTRL_RIGHT = '\x1b\x5b\x31\x3b\x36\x43' + """ Shift Ctrl cursor right. """ SHIFT_CTRL_LEFT = '\x1b\x5b\x31\x3b\x36\x44' + """ Shift Ctrl cursor left. """ SHIFT_CTRL_ALT_UP = '\x1b\x5b\x31\x3b\x31\x34\x41' + """ Shift Ctrl Alt cursor up. """ SHIFT_CTRL_ALT_DOWN = '\x1b\x5b\x31\x3b\x31\x34\x42' + """ Shift Ctrl Alt cursor down. """ SHIFT_CTRL_ALT_RIGHT= '\x1b\x5b\x31\x3b\x31\x34\x43' + """ Shift Ctrl Alt cursor right. """ SHIFT_CTRL_ALT_LEFT = '\x1b\x5b\x31\x3b\x31\x34\x44' + """ Shift Ctrl Alt cursor left. """ # Navigation keys INSERT = '\x1b\x5b\x32\x7e' + """ Insert. """ SUPR = '\x1b\x5b\x33\x7e' + """ Supr. """ HOME = '\x1b\x5b\x48' + """ Home. """ SHIFT_HOME = '\x1b\x5b\x31\x3b\x32\x48' + """ Shift Home. """ CTRL_HOME = '\x1b\x5b\x31\x3b\x35\x48' + """ Ctrl Home. """ ALT_HOME = '\x1b\x5b\x31\x3b\x39\x48' + """ Alt Home. """ SHIFT_CTRL_HOME = '\x1b\x5b\x31\x3b\x36\x48' + """ Shift Ctrl Home. """ SHIFT_ALT_HOME = '\x1b\x5b\x31\x3b\x31\x30\x48' + """ Shift Alt Home. """ SHIFT_CTRL_ALT_HOME = '\x1b\x5b\x31\x3b\x31\x34\x48' + """ Shift Ctrl Alt Home. """ END = '\x1b\x5b\x46' + """ End. """ SHIFT_END = '\x1b\x5b\x31\x3b\x32\x46' + """ Shift End. """ CTRL_END = '\x1b\x5b\x31\x3b\x35\x46' + """ Ctrl End. """ ALT_END = '\x1b\x5b\x31\x3b\x39\x46' + """ Alt End. """ SHIFT_CTRL_END = '\x1b\x5b\x31\x3b\x36\x46' + """ Shift Ctrl End. """ SHIFT_ALT_END = '\x1b\x5b\x31\x3b\x31\x30\x46' + """ Shift Alt End. """ SHIFT_CTRL_ALT_END = '\x1b\x5b\x31\x3b\x31\x34\x46' + """ Shift Ctrl Alt End. """ PAGE_UP = '\x1b\x5b\x35\x7e' + """ Page up. """ ALT_PAGE_UP = '\x1b\x1b\x5b\x35\x7e' - + """ Alt Page up. """ PAGE_DOWN = '\x1b\x5b\x36\x7e' + """ Page down. """ ALT_PAGE_DOWN = '\x1b\x1b\x5b\x36\x7e' + """ Alt Page down. """ # Funcion keys F1 = '\x1b\x4f\x50' + """ F1. """ F2 = '\x1b\x4f\x51' + """ F2. """ F3 = '\x1b\x4f\x52' + """ F3. """ F4 = '\x1b\x4f\x53' + """ F4. """ F5 = '\x1b\x5b\x31\x35\x7e' + """ F5. """ F6 = '\x1b\x5b\x31\x37\x7e' + """ F6. """ F7 = '\x1b\x5b\x31\x38\x7e' + """ F7. """ F8 = '\x1b\x5b\x31\x39\x7e' + """ F8. """ F9 = '\x1b\x5b\x32\x30\x7e' + """ F9. """ F10 = '\x1b\x5b\x32\x31\x7e' + """ F10. """ F11 = '\x1b\x5b\x32\x33\x7e' + """ F11. """ F12 = '\x1b\x5b\x32\x34\x7e' + """ F12. """ SHIFT_F1 = '\x1b\x5b\x31\x3b\x32\x50' + """ Shift F1. """ SHIFT_F2 = '\x1b\x5b\x31\x3b\x32\x51' + """ Shift F2. """ SHIFT_F3 = '\x1b\x5b\x31\x3b\x32\x52' + """ Shift F3. """ SHIFT_F4 = '\x1b\x5b\x31\x3b\x32\x53' + """ Shift F4. """ SHIFT_F5 = '\x1b\x5b\x31\x35\x3b\x32\x7e' + """ Shift F5. """ SHIFT_F6 = '\x1b\x5b\x31\x37\x3b\x32\x7e' + """ Shift F6. """ SHIFT_F7 = '\x1b\x5b\x31\x38\x3b\x32\x7e' + """ Shift F7. 
""" SHIFT_F8 = '\x1b\x5b\x31\x39\x3b\x32\x7e' + """ Shift F8. """ SHIFT_F9 = '\x1b\x5b\x32\x30\x3b\x32\x7e' + """ Shift F9. """ SHIFT_F10 = '\x1b\x5b\x32\x31\x3b\x32\x7e' + """ Shift F10. """ SHIFT_F11 = '\x1b\x5b\x32\x33\x3b\x32\x7e' + """ Shift F11. """ SHIFT_F12 = '\x1b\x5b\x32\x34\x3b\x32\x7e' + """ Shift F12. """ except ImportError: @@ -302,15 +405,18 @@ class FunctionKey(str, Enum): # type: ignore[no-redef] _errorInGetch:bool = False def getch() -> Optional[str|FunctionKey]: - """getch() -> key character + """ getch() -> key character + + Read a single keypress from stdin and return the resulting character. + Nothing is echoed to the console. This call will block if a keypress + is not already available, but will not wait for Enter to be pressed. - Read a single keypress from stdin and return the resulting character. - Nothing is echoed to the console. This call will block if a keypress - is not already available, but will not wait for Enter to be pressed. + If the pressed key was a modifier key, nothing will be detected; if + it were a special function key, it may return the first character of + of an escape sequence, leaving additional characters in the buffer. - If the pressed key was a modifier key, nothing will be detected; if - it were a special function key, it may return the first character of - of an escape sequence, leaving additional characters in the buffer. + Returns: + A single character str or a FunctionKey enum value. """ global _errorInGetch if _errorInGetch: # getch() doesnt't fully work previously, so just return @@ -339,7 +445,7 @@ def flushInput() -> None: sys.stdin.flush() _functionKeys:Tuple[FunctionKey, str] = [(e, e.value) for e in FunctionKey] # type:ignore -# TODO +""" List of all function keys. """ Commands = Dict[str, Callable[[str], None]] """ Mapping between characters and callback functions. """ @@ -493,6 +599,12 @@ def stopLoop() -> None: def readline(prompt:str='>') -> str: """ Read a line from the console. Catch EOF (^D) and Keyboard Interrup (^C). I that case None is returned. + + Args: + prompt: The prompt to display before the input. + + Returns: + The input line or None. """ answer = None try: @@ -504,6 +616,15 @@ def readline(prompt:str='>') -> str: return answer def waitForKeypress(s:float) -> Optional[str]: + """ Wait for a keypress for a maximum of *s* seconds. + If no key was pressed then return None. + + Args: + s: Maximum time to wait in seconds. + + Returns: + The key that was pressed or None. + """ for i in range(0, int(s * 1.0 / _timeout)): ch = None try: diff --git a/acme/helpers/MQTTConnection.py b/acme/helpers/MQTTConnection.py index b2c84621..c63e9acd 100644 --- a/acme/helpers/MQTTConnection.py +++ b/acme/helpers/MQTTConnection.py @@ -6,6 +6,7 @@ # # Implementation of an MQTT Client helper class. # +""" Implementation of an MQTT Client helper class. """ from __future__ import annotations from typing import Callable, Any, Tuple, Optional @@ -24,10 +25,15 @@ class MQTTTopic: """ Structure that represents a subscribed-to topic. """ topic:str = None + """ The MQTT topic. """ mid:int = None + """ The message ID of the MQTT subscription. """ isSubscribed:bool = False + """ Whether the topic is subscribed to. """ callback:MQTTCallback = None + """ The callback function for the topic. """ callbackArgs:dict = None + """ The callback arguments for the topic. """ class MQTTHandler(object): @@ -43,38 +49,88 @@ def onConnect(self, connection:MQTTConnection) -> bool: """ This method is called after the MQTT client connected to the MQTT broker. 
Usually, an MQTT client should subscribe to topics and register the callback methods here. + + Args: + connection: The MQTT connection. + + Returns: + True if successful, False otherwise. """ return True + def onDisconnect(self, connection:MQTTConnection) -> bool: """ This method is called after the MQTT client disconnected from the MQTT broker. + + Args: + connection: The MQTT connection. + + Returns: + True if successful, False otherwise. """ return True + def onSubscribed(self, connection:MQTTConnection, topic:str) -> bool: """ This method is called after the MQTT client successfully subsribed to a topic. + + Args: + connection: The MQTT connection. + topic: The topic that was subscribed to. + + Returns: + True if successful, False otherwise. """ connection.subscribedCount += 1 return True + def onUnsubscribed(self, connection:MQTTConnection, topic:str) -> bool: """ This method is called after the MQTT client successfully unsubsribed from a topic. + + Args: + connection: The MQTT connection. + topic: The topic that was unsubscribed from. + + Returns: + True if successful, False otherwise. """ connection.subscribedCount -= 1 return True + def onError(self, connection:MQTTConnection, rc:int) -> bool: """ This method is called when receiving an error when communicating with the MQTT broker. + + Args: + connection: The MQTT connection. + rc: The error code. + + Returns: + True if successful, False otherwise. """ return True + def logging(self, connection:MQTTConnection, level:int, message:str) -> bool: """ This method is called when a log message should be handled. + + Args: + connection: The MQTT connection. + level: The log level. + message: The log message. + + Returns: + True if successful, False otherwise. """ return True + def onShutdown(self, connection:MQTTConnection) -> None: """ This method is called after the ```connection``` was shut down. + + Args: + connection: The MQTT connection. """ @@ -82,7 +138,9 @@ def onShutdown(self, connection:MQTTConnection) -> None: class MQTTConnection(object): - + """ This class implements an MQTT client. It is a wrapper around the paho MQTT client. + It is implemented as a BackgroundWorker/Actor, so it runs in its own thread. + """ __slots__ = ( 'address', @@ -106,6 +164,7 @@ class MQTTConnection(object): 'actor', 'subscribedTopics', ) + """ Slots of the class. """ # # Runtime methods @@ -126,33 +185,75 @@ def __init__(self, address:str, lowLevelLogging:bool = True, messageHandler:MQTTHandler = None ) -> None: + """ Constructor. Initialize the MQTT client. + + Args: + address: The address of the MQTT broker. + port: The port of the MQTT broker. + keepalive: The keepalive time for the MQTT connection. + interface: The interface to bind to. + clientID: The client ID for the MQTT client. + username: The username for the MQTT broker. + password: The password for the MQTT broker. + useTLS: Whether to use TLS for the MQTT connection. + caFile: The CA file for the MQTT broker's certificate. + verifyCertificate: Indicator whether to verify the MQTT broker's certificate. + certfile: The certificate file for the MQTT client. + keyfile: The key file for the MQTT client. + lowLevelLogging: Indicator whether to log MQTT messages. + messageHandler: The message handler. + """ + self.address = address + """ The address of the MQTT broker. """ self.port = port if port else 8883 if useTLS else 1883 + """ The port of the MQTT broker. """ self.keepalive = keepalive + """ The keepalive time for the MQTT connection. 
""" self.bindIF = interface + """ The interface to bind to. """ self.username:str = username + """ The username for the MQTT broker. """ self.password:str = password + """ The password for the MQTT broker. """ self.useTLS:bool = useTLS + """ Whether to use TLS for the MQTT connection. """ self.verifyCertificate = verifyCertificate + """ Indicator whether to verify the MQTT broker's certificate. """ self.caFile = caFile + """ The CA file for the MQTT broker's certificate. """ self.mqttsCertfile = certfile + """ The certificate file for the MQTT client. """ self.mqttsKeyfile = keyfile + """ The key file for the MQTT client. """ self.clientID = clientID + """ The client ID for the MQTT client. """ self.lowLevelLogging = lowLevelLogging + """ Indicator whether to log MQTT messages. """ self.isStopped = True + """ Indicator whether the MQTT client is stopped.""" self.isConnected = False + """ Indicator whether the MQTT client is connected.""" self.subscribedCount = 0 + """ The number of subscribed-to topics. """ self.mqttClient:mqtt.Client = None + """ The MQTT client. """ self.messageHandler:MQTTHandler = messageHandler + """ The message handler. """ self.actor:BackgroundWorker = None + """ The actor for the MQTT client. """ self.subscribedTopics:dict[str, MQTTTopic] = {} + """ The list of subscribed-to topics. """ def shutdown(self) -> bool: """ Shutting down the MQTT client. + + Returns: + True if successful, False otherwise. """ self.isStopped = True @@ -215,7 +316,10 @@ def run(self) -> None: def _mqttActor(self) -> bool: - """ Backgroundworker callback to run the actuall MQTT loop. + """ BackgroundWorker callback to run the actuall MQTT loop. + + Returns: + Always True. """ self.isStopped = False self.messageHandler and self.messageHandler.logging(self.mqttClient, logging.INFO, 'MQTT: client started') @@ -232,6 +336,12 @@ def _mqttActor(self) -> bool: def _onConnect(self, client:mqtt.Client, userdata:Any, flags:dict, rc:int) -> None: """ Callback when the MQTT client connected to the broker. + + Args: + client: The MQTT client. + userdata: User data. + flags: Flags. + rc: Result code. """ self.messageHandler and self.messageHandler.logging(self, logging.DEBUG, f'MQTT: Connected with result code: {rc} ({mqtt.error_string(rc)})') if rc == 0: @@ -246,32 +356,55 @@ def _onConnect(self, client:mqtt.Client, userdata:Any, flags:dict, rc:int) -> No def _onDisconnect(self, client:mqtt.Client, userdata:Any, rc:int) -> None: """ Callback when the MQTT client disconnected from the broker. + + Args: + client: The MQTT client. + userdata: User data. + rc: Result code. """ self.messageHandler and self.messageHandler.logging(self, logging.DEBUG, f'MQTT: Disconnected with result code: {rc} ({mqtt.error_string(rc)})') self.subscribedTopics.clear() - if rc == 0: - self.isConnected = False - self.messageHandler and self.messageHandler.onDisconnect(self) - elif rc == 7: - self.isConnected = False - self.messageHandler.logging(self, logging.ERROR, f'MQTT: Cannot disconnect from broker. 
Result code: {rc} ({mqtt.error_string(rc)})') - self.messageHandler.logging(self, logging.ERROR, f'MQTT: Did another client connected with the same ID ({self.clientID})?') - self.messageHandler and self.messageHandler.onDisconnect(self) - else: - self.isConnected = False - if self.messageHandler: + + match rc: + case 0: + self.isConnected = False + self.messageHandler and self.messageHandler.onDisconnect(self) + case 7: + self.isConnected = False self.messageHandler.logging(self, logging.ERROR, f'MQTT: Cannot disconnect from broker. Result code: {rc} ({mqtt.error_string(rc)})') - self.messageHandler.onDisconnect(self) - self.messageHandler.onError(self, rc) + self.messageHandler.logging(self, logging.ERROR, f'MQTT: Did another client connected with the same ID ({self.clientID})?') + self.messageHandler and self.messageHandler.onDisconnect(self) + case _: + self.isConnected = False + if self.messageHandler: + self.messageHandler.logging(self, logging.ERROR, f'MQTT: Cannot disconnect from broker. Result code: {rc} ({mqtt.error_string(rc)})') + self.messageHandler.onDisconnect(self) + self.messageHandler.onError(self, rc) def _onLog(self, client:mqtt.Client, userdata:Any, level:int, buf:str) -> None: - """ Mapping of the paho MQTT client's log to the logging system. Also handles different log-level scheme. + """ Mapping of the paho MQTT client's log to the logging system. + Also handles different log-level scheme. + + Args: + client: The MQTT client. + userdata: User data. + level: Log level. + buf: Log message. """ self.lowLevelLogging and self.messageHandler and self.messageHandler.logging(self, mqtt.LOGGING_LEVEL[level], f'MQTT: {buf}') def _onSubscribe(self, client:mqtt.Client, userdata:Any, mid:int, granted_qos:int) -> None: + """ Callback when the client successfulle subscribed to a topic. The topic + is also added to the internal topic list. + + Args: + client: The MQTT client. + userdata: User data. + mid: The message ID. + granted_qos: The QoS level. + """ # TODO doc, error check when not connected, not subscribed for t in self.subscribedTopics.values(): if t.mid == mid: @@ -281,9 +414,17 @@ def _onSubscribe(self, client:mqtt.Client, userdata:Any, mid:int, granted_qos:in def _onUnsubscribe(self, client:mqtt.Client, userdata:Any, mid:int) -> None: + """ Callback when the client successfulle unsubscribed from a topic. The topic + is also removed from the internal topic list. + """ # TODO doc, error check when not connected, not subscribed """ Callback when the client successfulle unsubscribed from a topic. The topic is also removed from the internal list. + + Args: + client: The MQTT client. + userdata: User data. + mid: The message ID. """ for t in self.subscribedTopics.values(): if t.mid == mid: @@ -293,7 +434,13 @@ def _onUnsubscribe(self, client:mqtt.Client, userdata:Any, mid:int) -> None: def _onMessage(self, client:mqtt.Client, userdata:Any, message:mqtt.MQTTMessage) -> None: - """ Handle a received message. Forward it to the apropriate handler callback (in a Thread) + """ Handle a received message. Forward it to the apropriate handler callback + (in another Thread). + + Args: + client: The MQTT client. + userdata: User data. + message: The received message. 
""" self.lowLevelLogging and self.messageHandler and self.messageHandler.logging(self, logging.DEBUG, f'MQTT: received topic:{message.topic}, payload:{message.payload}') for t in self.subscribedTopics.keys(): @@ -315,6 +462,11 @@ def _onMessage(self, client:mqtt.Client, userdata:Any, message:mqtt.MQTTMessage) def subscribeTopic(self, topic:str|list[str], callback:Optional[MQTTCallback] = None, **kwargs:Any) -> None: """ Add one or more MQTT topics to subscribe to. Add the topic(s) afterwards to the list of subscribed-to topics. + + Args: + topic: The topic(s) to subscribe to. Either a single topic or a list of topics. + callback: The callback function to call when a message is received for the topic. + kwargs: Additional arguments for the callback function. """ def _subscribe(topic:str) -> None: """ Handle subscription of a single topic. @@ -340,6 +492,9 @@ def unsubscribeTopic(self, topic:str|MQTTTopic) -> None: """ Unsubscribe from a topic. `topic` is either an MQTTTopic structure with a previously subscribed to topic, or a topic name, in which case it is searched for in the list of MQTTTopics. + + Args: + topic: The topic to unsubscribe from. """ if isinstance(topic, MQTTTopic): if topic.topic not in self.subscribedTopics: @@ -371,13 +526,19 @@ def unsubscribeTopic(self, topic:str|MQTTTopic) -> None: def isFullySubscribed(self) -> bool: """ Check whether the number managed subscriptions matches the number of currently subscribed-to topics. + + Return: + True if fully subscribed, False otherwise. """ return self.subscribedCount == len(self.subscribedTopics) - def publish(self, topic:str, data:bytes) -> None: """ Publish the message *data* with the topic *topic* with the MQTT broker. + + Args: + topic: The topic to publish to. + data: The data to publish. """ self.mqttClient.publish(topic, data) @@ -390,26 +551,48 @@ def publish(self, topic:str, data:bytes) -> None: def idToMQTT(id:str) -> str: """ Convert a oneM2M ID to an MQTT compatible path element. + + Args: + id: The oneM2M ID to convert. + + Returns: + The MQTT compatible path element. """ return f'{id.lstrip("/").replace("/", ":")}' def idToMQTTClientID(id:str, isCSE:Optional[bool] = True) -> str: """ Convert a oneM2M ID to an MQTT client ID. + + Args: + id: The oneM2M ID to convert. + isCSE: Whether the ID is a CSE-ID or an AE-ID. + + Returns: + The MQTT client ID. """ return f'{"C::" if isCSE else "A::"}{id.lstrip("/")}' + def mqttToId(mqttId:str, isCSE:Optional[bool] = True) -> Tuple[str, bool]: """ Convert an MQTT compatible path element to an ID. + + Args: + mqttId: The MQTT compatible path element to convert. + isCSE: Whether the ID is a CSE-ID or an AE-ID. + + Returns: + The ID and whether it is a CSE-ID or an AE-ID. """ - if mqttId.startswith('A:'): - isCSE = False - elif mqttId.startswith('C:'): - isCSE = True - else: - return None, False + match mqttId: + case x if x.startswith('A:'): + isCSE = False + case x if x.startswith('C:'): + isCSE = True + case _: + return None, False return mqttId[2:].replace(':', '/'), isCSE -# Type for an MQTT Callback MQTTCallback = Callable[[MQTTConnection, str, bytes], None] +""" Type for an MQTT Callback. """ diff --git a/acme/helpers/NetworkTools.py b/acme/helpers/NetworkTools.py index a529da17..fcba8bf0 100644 --- a/acme/helpers/NetworkTools.py +++ b/acme/helpers/NetworkTools.py @@ -29,8 +29,17 @@ def isValidateIpAddress(ip:str) -> bool: return True _allowedPart = re.compile("(?!-)[A-Z\d-]{1,63}(? bool: + """ Validate a host name. + + Args: + hostname: The host name to validate. 
+ + Return: + True if the *hostname* is valid, or False otherwise. + """ if len(hostname) > 255: return False if hostname[-1] == '.': @@ -39,6 +48,14 @@ def isValidateHostname(hostname:str) -> bool: def isValidPort(port:str) -> bool: + """ Validate a port number. + + Args: + port: The port number to validate. + + Return: + True if *port* is valid, or False otherwise. + """ try: _port = int(port) except ValueError: @@ -47,6 +64,13 @@ def isValidPort(port:str) -> bool: def isTCPPortAvailable(port:int) -> bool: + """ Check whether a TCP port is available. + + Args: + port: The port to check. + + Return: + True if *port* is available, or False otherwise.""" try: with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) @@ -55,6 +79,7 @@ def isTCPPortAvailable(port:int) -> bool: return False return True + def getIPAddress(hostname:Optional[str] = None) -> str: """ Lookup and return the IP address for a host name. diff --git a/acme/helpers/OAuth.py b/acme/helpers/OAuth.py index a3c80728..c576031d 100644 --- a/acme/helpers/OAuth.py +++ b/acme/helpers/OAuth.py @@ -4,8 +4,8 @@ # (c) 2021 by Andreas Kraft # License: BSD 3-Clause License. See the LICENSE file for further details. # -# This module implements OAuth token retrieval. -# +""" This module implements OAuth token retrieval. +""" from __future__ import annotations from typing import Optional @@ -13,7 +13,10 @@ import requests Token = collections.namedtuple('Token', 'token expiration') +""" A named tuple for a token. """ + _expirationLeeway:float = 5.0 # 5 seconds leeway for token expiration +""" Leeway for token expiration. """ def getOAuthToken(serverURL:str, @@ -26,6 +29,16 @@ def getOAuthToken(serverURL:str, This function returns a new named tuple Token(token, expiration), or None in case of an error. The expiration is in epoch seconds. + + Args: + serverURL: The URL of the OAuth server. + clientID: The client ID. + clientSecret: The client secret. + token: Optional token to check if it is still valid. + kind: The kind of OAuth server. Currently only 'keycloak' is supported. + + Returns: + A Token tuple or None in case of an error. """ if not token: token = Token(token = None, expiration=0.0) diff --git a/acme/helpers/OrderedSet.py b/acme/helpers/OrderedSet.py index 543b7a09..d5229f0d 100644 --- a/acme/helpers/OrderedSet.py +++ b/acme/helpers/OrderedSet.py @@ -4,11 +4,10 @@ # (c) 2023 by Andreas Kraft # License: BSD 3-Clause License. See the LICENSE file for further details. # +""" Simple implementation of an ordered set.""" from typing import Any -""" Simple implementation of an ordered set.""" - class OrderedSet(list): """ Simple implementation of an ordered set. diff --git a/acme/helpers/TextTools.py b/acme/helpers/TextTools.py index c78d25a1..4875eb48 100644 --- a/acme/helpers/TextTools.py +++ b/acme/helpers/TextTools.py @@ -12,7 +12,7 @@ from typing import Optional, Any, Dict, Union, Callable, List -import base64, binascii, re, json +import base64, binascii, re, json, unicodedata _commentRegex = re.compile(r'(\".*?(? Optional[Any]: """ Find a structured *key* in the dictionary *dct*. If *key* does not exists then *default* is returned. @@ -288,6 +290,84 @@ def isNumber(string:Any) -> bool: return True + +_soundexReplacements = ( + ('BFPV', '1'), + ('CGJKQSXZ', '2'), + ('DT', '3'), + ('L', '4'), + ('MN', '5'), + ('R', '6'), + ) +""" Replacement characters for the soundex algorithm. 
""" + +def soundex(s:str, maxCount:Optional[int] = 4) -> str: + """ Convert a string to a Soundex value. + + Args: + s: The string to convert. + + Return: + The Soundex value as a string. + """ + + if not s: + return '' + + s = unicodedata.normalize('NFKD', s).upper() + + result = [s[0]] + count = 1 + + # find would-be replacement for first character + for lset, sub in _soundexReplacements: + if s[0] in lset: + last = sub + break + else: + last = None + + for ch in s[1:]: + for lset, sub in _soundexReplacements: + if ch in lset: + if sub != last: + result.append(sub) + count += 1 + last = sub + break + else: + if ch != 'H' and ch != 'W': + # leave last alone if middle letter is H or W + last = None + if count == maxCount: + break + + result += '0' * (4 - count) + return ''.join(result) + + +def soundsLike(s1:str, s2:str, maxCount:Optional[int] = 4) -> bool: + """ Compare two strings using the soundex algorithm. + + Args: + s1: First string to compare. + s2: Second string to compare. + maxCount: Maximum number of soundex result characters to compare. + + Return: + Boolean indicating the result of the comparison. + """ + # Remove 0 characters from the soundex result because they indicate a too short string + _s1 = soundex(s1, maxCount).replace('0', '') + _s2 = soundex(s2, maxCount).replace('0', '') + + # Only take the smaller number of characters of the soundex result into account + _l = min(len(_s1), len(_s2)) + return _s1[:_l] == _s2[:_l] + + return soundex(s1) == soundex(s2) + + def toHex(bts:bytes, toBinary:Optional[bool] = False, withLength:Optional[bool] = False) -> str: """ Print a byte string as hex output, similar to the "od" command. diff --git a/acme/helpers/TinyDBBetterTable.py b/acme/helpers/TinyDBBetterTable.py index b5fd6915..a6921086 100644 --- a/acme/helpers/TinyDBBetterTable.py +++ b/acme/helpers/TinyDBBetterTable.py @@ -11,7 +11,7 @@ from tinydb.table import Table class TinyDBBetterTable(Table): - """ This class is an addon to TinyDB's *Table* class. It removes some computations that are not + """ This class is an add-on to TinyDB's *Table* class. It removes some computations that are not necessary in ACME. - Document ID's are always strings. diff --git a/acme/helpers/TinyDBBufferedStorage.py b/acme/helpers/TinyDBBufferedStorage.py index ea5feb12..f9e1eb0b 100644 --- a/acme/helpers/TinyDBBufferedStorage.py +++ b/acme/helpers/TinyDBBufferedStorage.py @@ -16,16 +16,6 @@ class TinyDBBufferedStorage(JSONStorage): """ Storage driver class for TinyDB that implements a buffered disk write. - - Attributes: - __slots__: Define slots for instance variables. - _writeEvent: Event instance to notify when a write happened. - _writeDelay: Delay before writing the data to disk. - _shutdownLock: Internal lock when shutting down the database. - _running: Indicating that the database is open and in use. - _shutting_down: Indicator that the database is closing. This is different from `_running`. - _changed: Indicator that the write buffer is *dirty* and needs to be written. - _data: The actual database data, which is also strored in memory as a buffer. """ __slots__ = ( @@ -37,6 +27,8 @@ class TinyDBBufferedStorage(JSONStorage): '_changed', '_data', ) + """ Define slots for instance variables. """ + def __init__(self, path:str, create_dirs:bool = False, encoding:str = None, access_mode:str = 'r+', write_delay:int = 1, **kwargs:Any) -> None: """ Initialization of the storage driver. 
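The soundex() and soundsLike() helpers added to TextTools.py above can be exercised with a short sketch like the one below, assuming the package is importable under the acme/helpers path shown in the diff; the sample words are arbitrary and the commented outputs assume the default maxCount of 4.

# Quick usage sketch for the new phonetic helpers (sample words chosen arbitrarily).
from acme.helpers.TextTools import soundex, soundsLike

print(soundex('Acme'))               # expected 'A250': C -> 2, M -> 5, padded with '0'
print(soundsLike('Acme', 'Akme'))    # expected True: both words reduce to the same code
print(soundsLike('Acme', 'Banana'))  # expected False: the codes differ at the first letter

Note that soundsLike() strips the '0' padding and compares only as many code characters as the shorter result provides, so very short words are matched rather loosely by design.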
@@ -54,12 +46,19 @@ def __init__(self, path:str, create_dirs:bool = False, encoding:str = None, acce super().__init__(path, create_dirs, encoding, access_mode, **kwargs) self._shutdownLock = Thread.allocate_lock() + """ Internal lock when shutting down the database. """ self._writeEvent = Event() + """ Event instance to notify when a write happened. """ self._running = True + """ Indicating that the database is open and in use. """ self._shutting_down = False + """ Indicator that the database is closing. This is different from `_running`. """ self._changed = False + """ Indicator that the write buffer is *dirty* and needs to be written. """ self._writeDelay:int = write_delay + """ Time to wait before writing a changed database buffer, in seconds. """ self._data:Dict[str, Dict[str, Any]] = {} + """ The actual database data, which is also strored in memory as a buffer. """ # finishing init. Read the data for the first time self._data = super().read() diff --git a/acme/helpers/UDPServer.py b/acme/helpers/UDPServer.py new file mode 100644 index 00000000..94615e8b --- /dev/null +++ b/acme/helpers/UDPServer.py @@ -0,0 +1,243 @@ +# +# UdpServer.py +# +# (c) 2023 by Andreas Kraft, Yann Garcia +# License: BSD 3-Clause License. See the LICENSE file for further details. +# +# This module contains various utilty functions that are used from various +# modules and entities of the CSE. +# + +import threading +from typing import Callable, Any, Tuple +import socket +# Dtls +import ssl +from dtls.wrapper import wrap_server, wrap_client, DtlsSocket +import dtls.sslconnection as sslconnection + +from ..helpers.BackgroundWorker import BackgroundWorkerPool + +class UdpServer(object): + + __slots__ = ( + 'addr', + 'port', + 'socket', + 'listen_socket', + 'doListen', + 'received_data_callback', + 'useTLS', + 'verifyCertificate', + 'tlsVersion', + 'ssl_version', + 'privateKeyFile', + 'certificateFile', + 'privateKeyFile', + 'certificateFile', + 'logging', + 'ssl_ctx', + 'mtu' + ) + + def __init__(self, server_address:str, + port:str, + useDTLS:bool, + tlsVersion:str, + verifyCertificate:bool, + privateKeyFile:str, + certificateFile:str, + received_data_callback:Callable, + logging:Callable) -> None: + self.addr = server_address + self.port = port + self.socket:socket.socket = None # Client socket + self.listen_socket:socket.socket = None # Server socket + self.doListen = False + self.received_data_callback = received_data_callback + self.useTLS = useDTLS + self.tlsVersion = tlsVersion + self.ssl_version = { 'tls1.1': sslconnection.PROTOCOL_DTLSv1, + 'tls1.2': sslconnection.PROTOCOL_DTLSv1_2, + 'auto': sslconnection.PROTOCOL_DTLS }[self.tlsVersion] + self.verifyCertificate = verifyCertificate + + self.privateKeyFile = privateKeyFile + self.certificateFile = certificateFile + self.logging = logging + self.ssl_ctx:DtlsSocket = None + self.mtu = 512 #1500 TODO configurable + + + def listen(self, timeout:int = 5) -> None: # This does NOT return + self.listen_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) + self.listen_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + + def _listen(listenSocket:Tuple[socket.socket, DtlsSocket]) -> None: + self.doListen = True + while self.doListen: + self.logging(f'UdpServer.listen: In loop: {str(self.doListen)}') + try: + data, client_address = listenSocket.recvfrom(4096) + self.logging(f'UdpServer.listen: client_address: {str(client_address)}') + if len(client_address) > 2: + client_address = (client_address[0], client_address[1]) + 
self.logging(f'UdpServer.listen: receive_datagram (1) - {str(data)}') + if data is not None: + self.logging(f'UdpServer.listen: receive_datagram - - {str(data)}') + BackgroundWorkerPool.runJob(lambda : self.received_data_callback(data, client_address), f'CoAP_{str(client_address)}') # TODO a better thread name + # t = threading.Thread(target=self.received_data_callback, args=(data, client_address)) + # t.setDaemon(True) + # t.start() + except socket.timeout: + continue + except Exception as e: + self.logging(f'UdpServer.listen (secure): {str(e)}') + continue + + + if self.useTLS == True: + + # Setup DTLS context + self.logging(f'Setup SSL context. Certfile: {self.certificateFile}, KeyFile: {self.privateKeyFile}, TLS version: {self.tlsVersion}') + self.ssl_ctx = wrap_server( + self.listen_socket, + keyfile = self.privateKeyFile, + certfile = self.certificateFile, + cert_reqs = ssl.CERT_NONE if self.verifyCertificate == False else ssl.CERT_REQUIRED, + ssl_version = self.ssl_version, + #ca_certs=self.caCertificateFile, + do_handshake_on_connect = True, + user_mtu = self.mtu, + ssl_logging = True, + cb_ignore_ssl_exception_in_handshake = None, + cb_ignore_ssl_exception_read = None, + cb_ignore_ssl_exception_write = None) + + # Initialize and start listening + self.ssl_ctx.bind((self.addr, self.port)) + self.ssl_ctx.settimeout(timeout) + self.ssl_ctx.listen(0) + _listen(self.ssl_ctx) # Does not return + # self.doListen = True + # while self.doListen: + # self.logging(f'UdpServer.listen: In loop: {str(self.doListen)}') + # try: + # data, client_address = self.ssl_ctx.recvfrom(4096) + # self.logging(f'UdpServer.listen: client_address: {str(client_address)}') + # if len(client_address) > 2: + # client_address = (client_address[0], client_address[1]) + # self.logging(f'UdpServer.listen: receive_datagram (1) - {str(data)}') + # if not data is None: + # self.logging(f'UdpServer.listen: receive_datagram - - {str(data)}') + # BackgroundWorkerPool.runJob(lambda : self.received_data_callback(data, client_address), f'CoAP_{str(client_address)}') # TODO a better thread name + # # t = threading.Thread(target=self.received_data_callback, args=(data, client_address)) + # # t.setDaemon(True) + # # t.start() + # except socket.timeout: + # continue + # except Exception as e: + # self.logging(f'UdpServer.listen (secure): {str(e)}') + # continue + + else: + # Initialize and start listening (non-secure) + self.listen_socket.bind((self.addr, self.port)) + self.listen_socket.settimeout(timeout) + _listen(self.listen_socket) # Does not return + + # self.doListen = True + # while self.doListen: + # try: + # data, client_address = self.listen_socket.recvfrom(4096) + # if len(client_address) > 2: + # client_address = (client_address[0], client_address[1]) + # Logging.log(f'UdpServer.listen: receive_datagram - {str(data)}') + # t = threading.Thread(target=self.received_data_callback, args=(data, client_address)) + # t.setDaemon(True) + # t.start() + # except socket.timeout: + # continue + # except Exception as e: + # Logging.logWarn(f'UdpServer.listen: {str(e)}') + # break + + + # # def _cb_ignore_listen_exception(self, exception, server): + # """ + # In the CoAP server listen method, different exceptions can arise from the DTLS stack. Depending on the type of exception, a + # continuation might not be possible, or a logging might be desirable. With this callback both needs can be satisfied. 
+ # :param exception: What happened inside the DTLS stack + # :param server: Reference to the running CoAP server + # :return: True if further processing should be done, False processing should be stopped + # """ + # Logging.log('>>> UdpServer.listen: _cb_ignore_listen_exception: ' + str(exception)) + # if isinstance(exception, ssl.SSLError): + # # A client which couldn't verify the server tried to connect, continue but log the event + # if exception.errqueue[-1][0] == ssl.ERR_TLSV1_ALERT_UNKNOWN_CA: + # Logging.logWarn("Ignoring ERR_TLSV1_ALERT_UNKNOWN_CA from client %s" % ('unknown' if not hasattr(exception, 'peer') else str(exception.peer))) + # return True + # # ... and more ... + # return False + + # def _cb_ignore_write_exception(self, exception, client): + # """ + # In the CoAP client write method, different exceptions can arise from the DTLS stack. Depending on the type of exception, a + # continuation might not be possible, or a logging might be desirable. With this callback both needs can be satisfied. + # note: Default behaviour of CoAPthon without DTLS if no _cb_ignore_write_exception would be called is with "return True" + # :param exception: What happened inside the DTLS stack + # :param client: Reference to the running CoAP client + # :return: True if further processing should be done, False processing should be stopped + # """ + # Logging.log('>>> UdpServer.listen: _cb_ignore_write_exception: ' + str(exception)) + # return False + + # def _cb_ignore_read_exception(self, exception, client) -> bool: + # """ In the CoAP client read method, different exceptions can arise from the DTLS stack. Depending on the type of exception, a + # continuation might not be possible, or a logging might be desirable. With this callback both needs can be satisfied. + # note: Default behaviour of CoAPthon without DTLS if no _cb_ignore_read_exception would be called is with "return False" + + # Args: + # exception: What happened inside the DTLS stack. + # client: Reference to the running CoAP client. 
+ + # Returns: + # True if further processing should be done, False processing should be stopped + # """ + # Logging.log('>>> UdpServer.listen: _cb_ignore_read_exception: ' + str(exception)) + # return False + +# def send(self, p_coapMessage:CoapMessageResponse) -> None: +# self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) +# self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + def close(self) -> None: + self.doListen = False + if self.listen_socket: + if self.ssl_ctx: + self.ssl_ctx.unwrap() + self.listen_socket.close() + self.ssl_ctx = None + self.listen_socket = None + if self.socket: + self.socket.close() + self.socket = None + + + def sendTo(self, datagram): + self.logging(f'==> UdpServer.sendTo: /{str(datagram[0])} - {str(datagram[1])}') + try: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) + if self.useTLS == True: + sock = wrap_client(sock, cert_reqs = ssl.CERT_REQUIRED, + keyfile = self.privateKeyFile, + certfile = self.certificateFile, + ca_certs = self.caCertificateFile, + do_handshake_on_connect = True, + ssl_version = self.ssl_version) + sock.sendto(datagram[0], datagram[1]) + except Exception as e: + self.logging(f'UdpServer.sendTo: {str(e)}') + finally: + sock.close() diff --git a/acme/resources/ACP.py b/acme/resources/ACP.py index 322edd73..91a62f3e 100644 --- a/acme/resources/ACP.py +++ b/acme/resources/ACP.py @@ -77,24 +77,15 @@ def validate(self, originator:Optional[str] = None, if not self.pvs: raise BAD_REQUEST('pvs must not be empty') - # Check acod - # TODO Is this still necessary? Check in resource validation? - def _checkAcod(acrs:list) -> None: - if acrs: - for acr in acrs: - if (acod := acr.get('acod')): - for each in acod: - if not (chty := each.get('chty')) or not isinstance(chty, list): - raise BAD_REQUEST('chty is mandatory in acod') - - _checkAcod(findXPath(dct, f'{ResourceTypes.ACPAnnc.tpe()}/pv/acr')) - _checkAcod(findXPath(dct, f'{ResourceTypes.ACPAnnc.tpe()}/pvs/acr')) - # Get types for the acor members. Ignore if not found # This is an optimization used later in case there is a group in acor riTyDict = {} def _getAcorTypes(pv:JSON) -> None: + """ Get the types of the acor members. + Args: + pv: The pv attribute to get the types for. + """ if pv: for acr in pv.get('acr', []): if (acor := acr.get('acor')): @@ -238,6 +229,15 @@ def checkSelfPermission(self, originator:str, requestedPermission:Permission) -> def _checkAcor(self, acor:list[str], originator:str) -> bool: + """ Check whether an originator is in the list of acor entries. + + Args: + acor: The list of acor entries. + originator: The originator to check. + + Return: + True if the originator is in the list of acor entries, False otherwise. + """ # Check originator if 'all' in acor or \ diff --git a/acme/resources/ACTR.py b/acme/resources/ACTR.py index ca7ef272..577471a2 100644 --- a/acme/resources/ACTR.py +++ b/acme/resources/ACTR.py @@ -7,13 +7,12 @@ # ResourceType: Action # -""" Action (ACTRA) resource type. """ +""" Action (ACTR) resource type. 
""" from __future__ import annotations -from typing import Optional, Tuple, Any, cast +from typing import Optional, Tuple -from ..etc.Types import AttributePolicyDict, EvalMode, ResourceTypes, Result, JSON, Permission, EvalCriteriaOperator -from ..etc.Types import BasicType +from ..etc.Types import AttributePolicyDict, EvalMode, ResourceTypes, JSON, Permission, EvalCriteriaOperator from ..etc.ResponseStatusCodes import ResponseException, BAD_REQUEST from ..etc.Utils import riFromID from ..helpers.TextTools import findXPath @@ -24,7 +23,7 @@ class ACTR(AnnounceableResource): - """ Action (ACTRA) resource type. """ + """ Action (ACTR) resource type. """ # Specify the allowed child-resource types _allowedChildResourceTypes:list[ResourceTypes] = [ ResourceTypes.DEPR, @@ -221,13 +220,13 @@ def _checkReferencedResources(self, originator:str, sri:str, orc:str) -> Tuple[R try: resSri = CSE.dispatcher.retrieveResourceWithPermission(sri, originator, Permission.RETRIEVE) except ResponseException as e: - raise BAD_REQUEST(dbg = e.dbg) + raise BAD_REQUEST(e.dbg) if orc is not None: try: resOrc = CSE.dispatcher.retrieveResourceWithPermission(orc, originator, Permission.RETRIEVE) except ResponseException as e: - raise BAD_REQUEST(dbg = e.dbg) + raise BAD_REQUEST(e.dbg) return (resSri, resOrc) diff --git a/acme/resources/AE.py b/acme/resources/AE.py index 462fc501..c8f4b1ff 100644 --- a/acme/resources/AE.py +++ b/acme/resources/AE.py @@ -28,6 +28,7 @@ class AE(AnnounceableResource): ResourceTypes.CRS, ResourceTypes.FCNT, ResourceTypes.GRP, + ResourceTypes.LCP, ResourceTypes.PCH, ResourceTypes.SMD, ResourceTypes.SUB, @@ -140,18 +141,19 @@ def validate(self, originator:Optional[str] = None, # check api attribute if not (api := self['api']) or len(api) < 2: # at least R|N + another char raise BAD_REQUEST('missing or empty attribute: "api"') - if api.startswith('N'): - pass # simple format - elif api.startswith('R'): - if len(api.split('.')) < 3: - raise BAD_REQUEST('wrong format for registered ID in attribute "api": to few elements') - - # api must normally begin with a lower-case "r", but it is allowed for release 2a and 3 - elif api.startswith('r'): - if (rvi := self.getRVI()) is not None and rvi not in ['2a', '3']: - raise BAD_REQUEST(L.logWarn('lower case "r" is only allowed for release versions "2a" and "3"')) - else: - raise BAD_REQUEST(L.logWarn(f'wrong format for ID in attribute "api": {api} (must start with "R" or "N")')) + + match api: + case x if x.startswith('N'): + pass # simple format + case x if x.startswith('R'): + if len(x.split('.')) < 3: + raise BAD_REQUEST('wrong format for registered ID in attribute "api": to few elements') + # api must normally begin with a lower-case "r", but it is allowed for release 2a and 3 + case x if x.startswith('r'): + if (rvi := self.getRVI()) is not None and rvi not in ['2a', '3']: + raise BAD_REQUEST(L.logWarn('lower case "r" is only allowed for release versions "2a" and "3"')) + case _: + raise BAD_REQUEST(L.logWarn(f'wrong format for ID in attribute "api": {api} (must start with "R" or "N")')) def deactivate(self, originator:str) -> None: diff --git a/acme/resources/AEAnnc.py b/acme/resources/AEAnnc.py index 4187500e..1d93eb4d 100644 --- a/acme/resources/AEAnnc.py +++ b/acme/resources/AEAnnc.py @@ -25,7 +25,8 @@ class AEAnnc(AnnouncedResource): ResourceTypes.FCNT, ResourceTypes.FCNTAnnc, ResourceTypes.GRP, - ResourceTypes.GRPAnnc, + ResourceTypes.GRPAnnc, + ResourceTypes.LCPAnnc, ResourceTypes.TS, ResourceTypes.TSAnnc ] diff --git 
a/acme/resources/AnnounceableResource.py b/acme/resources/AnnounceableResource.py index f8c7b182..9ac963fb 100644 --- a/acme/resources/AnnounceableResource.py +++ b/acme/resources/AnnounceableResource.py @@ -250,11 +250,14 @@ def _getAnnouncedAttributes(self, attributes:AttributePolicyDict) -> list[str]: if not (policy := attributes.get(attr)): continue - if policy.announcement == Announced.MA: - mandatory.append(attr) - elif policy.announcement == Announced.OA and attr in announceableAttributes: # only add optional attributes that are also in aa - optional.append(attr) - # else: just ignore Announced.NA + match policy.announcement: + case Announced.MA: + mandatory.append(attr) + case Announced.OA if attr in announceableAttributes: # only add optional attributes that are also in aa + optional.append(attr) + case Announced.NA: + # just ignore Announced.NA + pass return mandatory + optional diff --git a/acme/resources/CIN.py b/acme/resources/CIN.py index aa990241..37e63441 100644 --- a/acme/resources/CIN.py +++ b/acme/resources/CIN.py @@ -6,6 +6,8 @@ # # ResourceType: ContentInstance # +""" ContentInstance (CIN) resource type. +""" from __future__ import annotations from typing import Optional @@ -20,9 +22,12 @@ class CIN(AnnounceableResource): + """ ContentInstance resource type. + """ # Specify the allowed child-resource types _allowedChildResourceTypes:list[ResourceTypes] = [ ResourceTypes.SMD ] + """ The allowed child-resource types. """ # Attributes and Attribute policies for this Resource Class # Assigned during startup in the Importer @@ -44,6 +49,8 @@ class CIN(AnnounceableResource): 'daci': None, 'st': None, 'cr': None, + 'loc': None, + # Resource attributes 'cnf': None, @@ -55,6 +62,7 @@ class CIN(AnnounceableResource): 'dcnt': None, 'dgt': None } + """ Attributes and `AttributePolicy` for this resource type. """ def __init__(self, dct:Optional[JSON] = None, diff --git a/acme/resources/CINAnnc.py b/acme/resources/CINAnnc.py index b29e36ad..6e61d2c7 100644 --- a/acme/resources/CINAnnc.py +++ b/acme/resources/CINAnnc.py @@ -6,6 +6,7 @@ # # CIN : Announceable variant # +""" ContentInstance announced (CINA) resource type.""" from __future__ import annotations from typing import Optional @@ -14,9 +15,11 @@ class CINAnnc(AnnouncedResource): + """ ContentInstance announced (CINA) resource type. """ # Specify the allowed child-resource types _allowedChildResourceTypes:list[ResourceTypes] = [ ] + """ The allowed child-resource types. """ # Attributes and Attribute policies for this Resource Class # Assigned during startup in the Importer @@ -31,6 +34,7 @@ class CINAnnc(AnnouncedResource): 'et': None, 'lbl': None, 'ast': None, + 'loc': None, 'lnk': None, # Resource attributes @@ -40,6 +44,7 @@ class CINAnnc(AnnouncedResource): 'or': None, 'conr': None } + """ Attributes and `AttributePolicy` for this resource type. """ def __init__(self, dct:Optional[JSON] = None, diff --git a/acme/resources/CNT.py b/acme/resources/CNT.py index 5599baea..85ea71bd 100644 --- a/acme/resources/CNT.py +++ b/acme/resources/CNT.py @@ -6,6 +6,8 @@ # # ResourceType: Container # +""" Container (CNT) resource type. +""" from __future__ import annotations from typing import Optional, cast @@ -23,6 +25,7 @@ class CNT(ContainerResource): + """ Container resource type. """ _allowedChildResourceTypes = [ ResourceTypes.ACTR, ResourceTypes.CNT, @@ -33,6 +36,7 @@ class CNT(ContainerResource): ResourceTypes.TS, ResourceTypes.CNT_LA, ResourceTypes.CNT_OL ] + """ The allowed child-resource types. 
""" # Attributes and Attribute policies for this Resource Class # Assigned during startup in the Importer @@ -69,6 +73,7 @@ class CNT(ContainerResource): # EXPERIMENTAL 'subi': None, } + """ Attributes and `AttributePolicy` for this resource type. """ def __init__(self, dct:Optional[JSON] = None, @@ -76,10 +81,6 @@ def __init__(self, dct:Optional[JSON] = None, create:Optional[bool] = False) -> None: super().__init__(ResourceTypes.CNT, dct, pi, create = create) - # TODO optimize this - if Configuration.get('resource.cnt.enableLimits'): # Only when limits are enabled - self.setAttribute('mni', Configuration.get('resource.cnt.mni'), overwrite = False) - self.setAttribute('mbs', Configuration.get('resource.cnt.mbs'), overwrite = False) self.setAttribute('cni', 0, overwrite = False) self.setAttribute('cbs', 0, overwrite = False) self.setAttribute('st', 0, overwrite = False) @@ -89,7 +90,13 @@ def __init__(self, dct:Optional[JSON] = None, def activate(self, parentResource:Resource, originator:str) -> None: super().activate(parentResource, originator) - + + # Set the limits for this container if enabled + # TODO optimize this + if Configuration.get('resource.cnt.enableLimits'): # Only when limits are enabled + self.setAttribute('mni', Configuration.get('resource.cnt.mni'), overwrite = False) + self.setAttribute('mbs', Configuration.get('resource.cnt.mbs'), overwrite = False) + # register latest and oldest virtual resources L.isDebug and L.logDebug(f'Registering latest and oldest virtual resources for: {self.ri}') @@ -249,3 +256,20 @@ def _validateChildren(self) -> None: # End validating self.__validating = False + + def setLCPLink(self, lcpRi:str) -> None: + """ Set the link to the resource. This is called from the resource. + This also sets the link in the resource. + + Args: + lcpRi: The resource id of the resource. + """ + + self.setAttribute('li', lcpRi) + + # Also, set in the resource + if (latest := CSE.dispatcher.retrieveLocalResource(self.getLatestRI())) is not None: + latest.setLCPLink(lcpRi) + latest.dbUpdate() + + self.dbUpdate() \ No newline at end of file diff --git a/acme/resources/CNTAnnc.py b/acme/resources/CNTAnnc.py index bbd99988..760953da 100644 --- a/acme/resources/CNTAnnc.py +++ b/acme/resources/CNTAnnc.py @@ -6,6 +6,7 @@ # # CNT : Announceable variant # +""" Container announced (CNTA) resource type.""" from __future__ import annotations from typing import Optional @@ -15,6 +16,7 @@ class CNTAnnc(AnnouncedResource): + """ Container announced (CNTA) resource type. """ # Specify the allowed child-resource types _allowedChildResourceTypes = [ ResourceTypes.ACTR, @@ -28,6 +30,7 @@ class CNTAnnc(AnnouncedResource): ResourceTypes.SUB, ResourceTypes.TS, ResourceTypes.TSAnnc ] + """ The allowed child-resource types. """ # Attributes and Attribute policies for this Resource Class # Assigned during startup in the Importer @@ -55,6 +58,7 @@ class CNTAnnc(AnnouncedResource): 'or': None, 'disr': None } + """ Attributes and `AttributePolicy` for this resource type. 
""" def __init__(self, dct:Optional[JSON] = None, diff --git a/acme/resources/CNT_LA.py b/acme/resources/CNT_LA.py index c07173e2..1254e746 100644 --- a/acme/resources/CNT_LA.py +++ b/acme/resources/CNT_LA.py @@ -13,7 +13,7 @@ from __future__ import annotations from typing import Optional -from ..etc.Types import AttributePolicyDict, ResourceTypes, Result, JSON, CSERequest +from ..etc.Types import AttributePolicyDict, ResourceTypes, Result, JSON, CSERequest, LocationSource from ..etc.ResponseStatusCodes import ResponseStatusCode, OPERATION_NOT_ALLOWED, NOT_FOUND from ..services import CSE from ..services.Logging import Logging as L @@ -24,6 +24,9 @@ class CNT_LA(VirtualResource): """ This class implements the virtual resource for resources. """ + _li = '__li__' + """ Link to LCP from the parent resource. """ + _allowedChildResourceTypes:list[ResourceTypes] = [ ] """ A list of allowed child-resource types for this resource type. """ @@ -39,6 +42,9 @@ def __init__(self, dct:Optional[JSON] = None, pi:Optional[str] = None, create:Optional[bool] = False) -> None: super().__init__(ResourceTypes.CNT_LA, dct, pi, create = create, inheritACP = True, readOnly = True, rn = 'la') + + # Add to internal attributes to ignore in validation etc + self._addToInternalAttributes(self._li) def handleRetrieveRequest(self, request:Optional[CSERequest] = None, @@ -55,6 +61,13 @@ def handleRetrieveRequest(self, request:Optional[CSERequest] = None, The latest for the parent , or an error `Result`. """ L.isDebug and L.logDebug('Retrieving latest CIN from CNT') + + # Handle the request when the parent container's locationID is set + # This might create a new CIN + if (li := self.getLCPLink()) is not None: + if (result := self.retrieveLatestOldest(request, originator, ResourceTypes.CIN, oldest = False)) is not None: + CSE.location.handleLatestRetrieve(result.resource, li) + return self.retrieveLatestOldest(request, originator, ResourceTypes.CIN, oldest = False) @@ -107,3 +120,21 @@ def handleDeleteRequest(self, request:CSERequest, id:str, originator:str) -> Res raise NOT_FOUND('no instance for ') CSE.dispatcher.deleteLocalResource(resource, originator, withDeregistration = True) return Result(rsc = ResponseStatusCode.DELETED, resource = resource) + + + def getLCPLink(self) -> str: + """ Retrieve a `LCP` (LocationPolicy) resource's resource ID. + + Return: + The resource ID. + """ + return self[self._li] + + + def setLCPLink(self, lcpRi:str) -> None: + """ Assign a resource ID of a `LCP` (LocationPolicy) resource to the latest resource. + + Args: + lcpRi: The resource ID of an `LCP` resource. + """ + self.setAttribute(self._li, lcpRi, overwrite = True) diff --git a/acme/resources/CRS.py b/acme/resources/CRS.py index 42b40540..9e7559ff 100644 --- a/acme/resources/CRS.py +++ b/acme/resources/CRS.py @@ -13,6 +13,7 @@ from typing import Optional, cast from copy import deepcopy + from ..etc.Utils import pureResource, toSPRelative, csiFromSPRelative, compareIDs from ..helpers.TextTools import findXPath, setXPath from ..helpers.ResourceSemaphore import criticalResourceSection, inCriticalSection @@ -33,7 +34,7 @@ class CRS(Resource): _sudRI = '__sudRI__' # Reference when the resource is been deleted because of the deletion of a rrat or srat subscription. 
Usually empty # Specify the allowed child-resource types - _allowedChildResourceTypes:list[ResourceTypes] = [ ] + _allowedChildResourceTypes:list[ResourceTypes] = [ ResourceTypes.SCH ] # Attributes and Attribute policies for this Resource Class # Assigned during startup in the Importer @@ -117,10 +118,7 @@ def activate(self, parentResource:Resource, originator:str) -> None: if self.twt == TimeWindowType.PERIODICWINDOW: CSE.notification.startCRSPeriodicWindow(self.ri, self.tws, self._countSubscriptions(), self.eem) - # nsi is at least an empty list if nse is present, otherwise it must not be present - if self.nse is not None: - self.setAttribute('nsi', [], overwrite = False) - CSE.notification.validateAndConstructNotificationStatsInfo(self) + # "nsi" will be added later during the first stat recording # Set twi default if not present self.setAttribute('eem', EventEvaluationMode.ALL_EVENTS_PRESENT.value, False) @@ -179,10 +177,11 @@ def update(self, dct:Optional[JSON] = None, def deactivate(self, originator:str) -> None: # Deactivate time windows - if self.twt == TimeWindowType.PERIODICWINDOW: - CSE.notification.stopCRSPeriodicWindow(self.ri) - elif self.twt == TimeWindowType.SLIDINGWINDOW: - CSE.notification.stopCRSSlidingWindow(self.ri) + match self.twt: + case TimeWindowType.PERIODICWINDOW: + CSE.notification.stopCRSPeriodicWindow(self.ri) + case TimeWindowType.SLIDINGWINDOW: + CSE.notification.stopCRSSlidingWindow(self.ri) # Delete rrat and srat subscriptions self._deleteSubscriptions(originator) @@ -219,6 +218,15 @@ def validate(self, originator:Optional[str] = None, raise BAD_REQUEST(L.logDebug(f'eem = {eem} is not allowed with twt = SLIDINGWINDOW')) + def childWillBeAdded(self, childResource: Resource, originator: str) -> None: + super().childWillBeAdded(childResource, originator) + if childResource.ty == ResourceTypes.SCH: + if (rn := childResource._originalDict.get('rn')) is None: + childResource.setResourceName('notificationSchedule') + elif rn != 'notificationSchedule': + raise BAD_REQUEST(L.logDebug(f'rn of under must be "notificationSchedule"')) + + def handleNotification(self, request:CSERequest, originator:str) -> None: """ Handle a notification request to a CRS resource. 
@@ -409,8 +417,9 @@ def _deleteSubscriptionForRrat(self, subRI:str, originator:str) -> None: L.isDebug and L.logDebug(f'Deleting : {subRI}') try: CSE.dispatcher.deleteResource(subRI, originator = originator) - except NOT_FOUND as e: - pass # ignore not found resources here + except Exception as e: + # ignore not found resources here + L.logWarn(f'Cannot delete subscription for {subRI}: {e}') # To be sure: Set the RI in the rrats list to None _rrats = self.rrats @@ -424,8 +433,8 @@ def _deleteFromSubscriptionsForSrat(self, srat:str, originator:str) -> None: if (subRI := _subRIs.get(srat)) is not None: try: resource = CSE.dispatcher.retrieveResource(subRI, originator = originator) - except: - raise BAD_REQUEST(L.logWarn(f'Cannot retrieve subscription for {srat} uri: {subRI}')) + except Exception as e: + L.logWarn(f'Cannot retrieve subscription for {subRI}: {e}') newDct:JSON = { 'm2m:sub': {} } # new request dct @@ -447,7 +456,7 @@ def _deleteFromSubscriptionsForSrat(self, srat:str, originator:str) -> None: try: resource = CSE.dispatcher.updateResourceFromDict(newDct, subRI, originator = originator, resource = resource) except ResponseException as e: - raise BAD_REQUEST(L.logWarn(f'Cannot update subscription for {srat} uri: {subRI}: {e.dbg}')) + L.logWarn(f'Cannot update subscription for {srat} uri: {subRI}: {e} {e.dbg}') del _subRIs[srat] self.setAttribute(self._subSratRIs, _subRIs) diff --git a/acme/resources/CSEBase.py b/acme/resources/CSEBase.py index acd83615..eda5fc08 100644 --- a/acme/resources/CSEBase.py +++ b/acme/resources/CSEBase.py @@ -6,6 +6,7 @@ # # ResourceType: CSEBase # +""" CSEBase (CSEBase) resource type. """ from __future__ import annotations from typing import Optional @@ -22,6 +23,7 @@ # TODO notificationCongestionPolicy class CSEBase(AnnounceableResource): + """ CSEBase (CSEBase) resource type. """ # Specify the allowed child-resource types _allowedChildResourceTypes = [ ResourceTypes.ACP, @@ -32,12 +34,15 @@ class CSEBase(AnnounceableResource): ResourceTypes.CNT, ResourceTypes.FCNT, ResourceTypes.GRP, + ResourceTypes.LCP, ResourceTypes.NOD, ResourceTypes.REQ, + ResourceTypes.SCH, ResourceTypes.SUB, ResourceTypes.TS, ResourceTypes.TSB, ResourceTypes.CSEBaseAnnc ] + """ The allowed child-resource types. """ # Attributes and Attribute policies for this Resource Class # Assigned during startup in the Importer @@ -65,6 +70,8 @@ class CSEBase(AnnounceableResource): 'csz': None, 'ctm': None, } + """ Represent a dictionary of attribute policies used in validation. """ + def __init__(self, dct:JSON, create:Optional[bool] = False) -> None: @@ -129,6 +136,12 @@ def willBeRetrieved(self, originator:str, self.setAttribute('srv', CSE.supportedReleaseVersions) + def childWillBeAdded(self, childResource: Resource, originator: str) -> None: + super().childWillBeAdded(childResource, originator) + if childResource.ty == ResourceTypes.SCH: + if CSE.dispatcher.retrieveDirectChildResources(self.ri, ResourceTypes.SCH): + raise BAD_REQUEST('Only one resource is allowed for the CSEBase') + def getCSE() -> CSEBase: # Actual: CSEBase Resource """ Return the resource. @@ -136,5 +149,4 @@ def getCSE() -> CSEBase: # Actual: CSEBase Resource Return: resource. 
""" - #return CSE.dispatcher.retrieveResource(CSE.cseRi) return resourceFromCSI(CSE.cseCsi) diff --git a/acme/resources/CSEBaseAnnc.py b/acme/resources/CSEBaseAnnc.py index 13d27a08..aab3e984 100644 --- a/acme/resources/CSEBaseAnnc.py +++ b/acme/resources/CSEBaseAnnc.py @@ -6,6 +6,7 @@ # # CNT : Announceable variant # +""" CSEBase announced (CSEBaseA) resource type. """ from __future__ import annotations from typing import Optional @@ -23,7 +24,9 @@ class CSEBaseAnnc(AnnouncedResource): ResourceTypes.CNTAnnc, ResourceTypes.FCNTAnnc, ResourceTypes.GRPAnnc, + ResourceTypes.LCPAnnc, ResourceTypes.NODAnnc, + ResourceTypes.SCHAnnc, ResourceTypes.SUB, ResourceTypes.TSAnnc, ResourceTypes.TSBAnnc ] diff --git a/acme/resources/CSR.py b/acme/resources/CSR.py index 35d2fc8b..3e44cc7a 100644 --- a/acme/resources/CSR.py +++ b/acme/resources/CSR.py @@ -36,7 +36,8 @@ class CSR(AnnounceableResource): ResourceTypes.FCNTAnnc, ResourceTypes.FCI, ResourceTypes.GRP, - ResourceTypes.GRPAnnc, + ResourceTypes.GRPAnnc, + ResourceTypes.LCPAnnc, ResourceTypes.MGMTOBJAnnc, ResourceTypes.NODAnnc, ResourceTypes.PCH, diff --git a/acme/resources/CSRAnnc.py b/acme/resources/CSRAnnc.py index 2699b7da..009f42dd 100644 --- a/acme/resources/CSRAnnc.py +++ b/acme/resources/CSRAnnc.py @@ -20,22 +20,24 @@ class CSRAnnc(AnnouncedResource): # Specify the allowed child-resource types _allowedChildResourceTypes = [ ResourceTypes.ACTR, ResourceTypes.ACTRAnnc, + ResourceTypes.ACP, + ResourceTypes.ACPAnnc, + ResourceTypes.AEAnnc, ResourceTypes.CNT, ResourceTypes.CNTAnnc, ResourceTypes.CINAnnc, + ResourceTypes.CSRAnnc, ResourceTypes.FCNT, ResourceTypes.FCNTAnnc, ResourceTypes.GRP, ResourceTypes.GRPAnnc, - ResourceTypes.ACP, - ResourceTypes.ACPAnnc, + ResourceTypes.LCPAnnc, + ResourceTypes.MGMTOBJAnnc, + ResourceTypes.NODAnnc, + ResourceTypes.SCHAnnc, ResourceTypes.SUB, ResourceTypes.TS, ResourceTypes.TSAnnc, - ResourceTypes.CSRAnnc, - ResourceTypes.MGMTOBJAnnc, - ResourceTypes.NODAnnc, - ResourceTypes.AEAnnc, ResourceTypes.TSB, ResourceTypes.TSBAnnc ] diff --git a/acme/resources/DEPR.py b/acme/resources/DEPR.py index 0a7ec886..8353e3c2 100644 --- a/acme/resources/DEPR.py +++ b/acme/resources/DEPR.py @@ -65,7 +65,7 @@ def activate(self, parentResource: Resource, originator: str) -> None: try: resRri = CSE.dispatcher.retrieveResourceWithPermission(self.rri, originator, Permission.RETRIEVE) except ResponseException as e: - raise BAD_REQUEST(dbg = e.dbg) + raise BAD_REQUEST(e.dbg) # Check existence of referenced subject attribute in the referenced resource. 
sbjt = self.evc['sbjt'] @@ -87,7 +87,7 @@ def update(self, dct: JSON = None, try: resRri = CSE.dispatcher.retrieveResourceWithPermission(self.getFinalResourceAttribute('rri', dct), originator, Permission.RETRIEVE) except ResponseException as e: - raise BAD_REQUEST(dbg = e.dbg) + raise BAD_REQUEST(e.dbg) if (evc := findXPath(dct, 'm2m:depr/evc')) is not None: diff --git a/acme/resources/FCI.py b/acme/resources/FCI.py index 544cb2b0..6d02c55e 100644 --- a/acme/resources/FCI.py +++ b/acme/resources/FCI.py @@ -31,6 +31,7 @@ class FCI(Resource): 'ct': None, 'et': None, 'lbl': None, + 'loc': None, # Resource attributes 'cs': None, diff --git a/acme/resources/FCNT.py b/acme/resources/FCNT.py index 2300b9f5..4522dba9 100644 --- a/acme/resources/FCNT.py +++ b/acme/resources/FCNT.py @@ -236,7 +236,7 @@ def _validateChildren(self, originator:str, def flexContainerInstances(self) -> list[Resource]: """ Get all flexContainerInstances of a resource and return a sorted (by ct) list """ - return sorted(CSE.dispatcher.directChildResources(self.ri, ResourceTypes.FCI), key = lambda x: x.ct) # type:ignore[no-any-return] + return sorted(CSE.dispatcher.retrieveDirectChildResources(self.ri, ResourceTypes.FCI), key = lambda x: x.ct) # type:ignore[no-any-return] # Add a new FlexContainerInstance for this flexContainer @@ -306,10 +306,10 @@ def _removeLaOl(self) -> None: L.isDebug and L.logDebug(f'De-registering latest and oldest virtual resources for: {self.ri}') # remove latest - if len(chs := CSE.dispatcher.directChildResources(self.ri, ResourceTypes.FCNT_LA)) == 1: # type:ignore[no-any-return] + if len(chs := CSE.dispatcher.retrieveDirectChildResources(self.ri, ResourceTypes.FCNT_LA)) == 1: # type:ignore[no-any-return] CSE.dispatcher.deleteLocalResource(chs[0]) # ignore errors # remove oldest - if len(chs := CSE.dispatcher.directChildResources(self.ri, ResourceTypes.FCNT_OL)) == 1: # type:ignore[no-any-return] + if len(chs := CSE.dispatcher.retrieveDirectChildResources(self.ri, ResourceTypes.FCNT_OL)) == 1: # type:ignore[no-any-return] CSE.dispatcher.deleteLocalResource(chs[0]) # ignore errors self.setAttribute(self._hasFCI, False) @@ -319,7 +319,7 @@ def _removeFCIs(self) -> None: """ Remove the FCI childResources. """ L.isDebug and L.logDebug(f'Removing FCI child resources for: {self.ri}') - chs = CSE.dispatcher.directChildResources(self.ri, ty = ResourceTypes.FCI) + chs = CSE.dispatcher.retrieveDirectChildResources(self.ri, ty = ResourceTypes.FCI) for ch in chs: # self.childRemoved(r, originator) # It should not be necessary to notify self at this point. 
CSE.dispatcher.deleteLocalResource(ch, parentResource = self) diff --git a/acme/resources/Factory.py b/acme/resources/Factory.py index caa7ebf7..98ebd332 100644 --- a/acme/resources/Factory.py +++ b/acme/resources/Factory.py @@ -15,7 +15,7 @@ from ..etc.Types import ResourceTypes, addResourceFactoryCallback, FactoryCallableT from ..etc.ResponseStatusCodes import BAD_REQUEST -from ..etc.Types import Result, JSON +from ..etc.Types import JSON from ..etc.Utils import pureResource from ..etc.Constants import Constants from ..services.Logging import Logging as L @@ -49,6 +49,8 @@ from ..resources.GRP import GRP from ..resources.GRPAnnc import GRPAnnc from ..resources.GRP_FOPT import GRP_FOPT +from ..resources.LCP import LCP +from ..resources.LCPAnnc import LCPAnnc from ..resources.NOD import NOD from ..resources.NODAnnc import NODAnnc from ..resources.PCH import PCH @@ -57,6 +59,8 @@ from ..resources.SUB import SUB from ..resources.SMD import SMD from ..resources.SMDAnnc import SMDAnnc +from ..resources.SCH import SCH +from ..resources.SCHAnnc import SCHAnnc from ..resources.TS import TS from ..resources.TSAnnc import TSAnnc from ..resources.TS_LA import TS_LA @@ -124,11 +128,15 @@ addResourceFactoryCallback(ResourceTypes.GRP, GRP, lambda dct, tpe, pi, create : GRP(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.GRPAnnc, GRPAnnc, lambda dct, tpe, pi, create : GRPAnnc(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.GRP_FOPT, GRP_FOPT, lambda dct, tpe, pi, create : GRP_FOPT(dct, pi = pi, create = create)) +addResourceFactoryCallback(ResourceTypes.LCP, LCP, lambda dct, tpe, pi, create : LCP(dct, pi = pi, create = create)) +addResourceFactoryCallback(ResourceTypes.LCPAnnc, LCPAnnc, lambda dct, tpe, pi, create : LCPAnnc(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.NOD, NOD, lambda dct, tpe, pi, create : NOD(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.NODAnnc, NODAnnc, lambda dct, tpe, pi, create : NODAnnc(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.PCH, PCH, lambda dct, tpe, pi, create : PCH(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.PCH_PCU, PCH_PCU, lambda dct, tpe, pi, create : PCH_PCU(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.REQ, REQ, lambda dct, tpe, pi, create : REQ(dct, pi = pi, create = create)) +addResourceFactoryCallback(ResourceTypes.SCH, SCH, lambda dct, tpe, pi, create : SCH(dct, pi = pi, create = create)) +addResourceFactoryCallback(ResourceTypes.SCHAnnc, SCHAnnc, lambda dct, tpe, pi, create : SCHAnnc(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.SMD, SMD, lambda dct, tpe, pi, create : SMD(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.SMDAnnc, SMDAnnc, lambda dct, tpe, pi, create : SMDAnnc(dct, pi = pi, create = create)) addResourceFactoryCallback(ResourceTypes.SUB, SUB, lambda dct, tpe, pi, create : SUB(dct, pi = pi, create = create)) @@ -227,14 +235,16 @@ def resourceFromDict(resDict:Optional[JSON] = {}, # Determine a factory and call it factory:FactoryCallableT = None - if typ == ResourceTypes.MGMTOBJ: # for - # mgd = resDict['mgd'] if 'mgd' in resDict else None # Identify mdg in - factory = ResourceTypes(resDict['mgd']).resourceFactory() - elif typ == ResourceTypes.MGMTOBJAnnc: # for - # mgd = resDict['mgd'] if 'mgd' in resDict else None # Identify mdg in - factory = ResourceTypes(resDict['mgd']).announced().resourceFactory() - else: 
- factory = typ.resourceFactory() + match typ: + case ResourceTypes.MGMTOBJ: + # mgd = resDict['mgd'] if 'mgd' in resDict else None # Identify mdg in + factory = ResourceTypes(resDict['mgd']).resourceFactory() + case ResourceTypes.MGMTOBJAnnc: + # mgd = resDict['mgd'] if 'mgd' in resDict else None # Identify mdg in + factory = ResourceTypes(resDict['mgd']).announced().resourceFactory() + case _: + factory = typ.resourceFactory() + if factory: return cast(Resource, factory(resDict, tpe, pi, create)) diff --git a/acme/resources/GRPAnnc.py b/acme/resources/GRPAnnc.py index feb3bd21..f5c087fd 100644 --- a/acme/resources/GRPAnnc.py +++ b/acme/resources/GRPAnnc.py @@ -36,7 +36,6 @@ class GRPAnnc(AnnouncedResource): 'acpi':None, 'daci': None, 'ast': None, - 'loc': None, 'lnk': None, # Resource attributes diff --git a/acme/resources/LCP.py b/acme/resources/LCP.py new file mode 100644 index 00000000..e4fa1511 --- /dev/null +++ b/acme/resources/LCP.py @@ -0,0 +1,204 @@ + # +# LCP.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. See the LICENSE file for further details. +# +# ResourceType: LocationPolicy +# + +""" LocationPolicy (LCP) resource type. """ + +from __future__ import annotations +from typing import Optional + +from ..etc.Constants import Constants as C +from ..etc.Types import AttributePolicyDict, ResourceTypes, JSON, LocationSource, GeofenceEventCriteria, LocationUpdateEventCriteria, LocationInformationType +from ..services.Logging import Logging as L +from ..services import CSE +from ..services.Configuration import Configuration +from ..resources.Resource import Resource +from ..resources.AnnounceableResource import AnnounceableResource +from ..resources import Factory +from ..etc.ResponseStatusCodes import BAD_REQUEST, NOT_IMPLEMENTED +from ..etc.GeoTools import getGeoPolygon + +# TODO add annc +# TODO add to supported resources of CSE + +class LCP(AnnounceableResource): + """ LocationPolicy (LCP) resource type. """ + + _gta = '__gta__' + + # Specify the allowed child-resource types + _allowedChildResourceTypes:list[ResourceTypes] = [ ResourceTypes.SUB ] + """ The allowed child-resource types. """ + + # Attributes and Attribute policies for this Resource Class + # Assigned during startup in the Importer + _attributes:AttributePolicyDict = { + # Common and universal attributes + 'rn': None, + 'ty': None, + 'ri': None, + 'pi': None, + 'ct': None, + 'lt': None, + 'lbl': None, + 'acpi':None, + 'et': None, + 'daci': None, + 'cstn': None, + 'at': None, + 'aa': None, + 'ast': None, + + # Resource attributes + 'los': None, + 'lit': None, + 'lou': None, + 'lot': None, + 'lor': None, + 'loi': None, + 'lon': None, + 'lost': None, + 'gta': None, + 'gec': None, + 'aid': None, + 'rlkl': None, + 'luec': None + } + """ Attributes and `AttributePolicy` for this resource type. 
""" + + + def __init__(self, dct:Optional[JSON] = None, pi:Optional[str] = None, create:Optional[bool] = False) -> None: + super().__init__(ResourceTypes.LCP, dct, pi, create = create) + + # Add to internal attributes to ignore in validation etc + self._addToInternalAttributes(self._gta) + + + def activate(self, parentResource: Resource, originator: str) -> None: + super().activate(parentResource, originator) + + # Creating extra resource + # Set the li attribute to the LCP's ri afterwards + _cnt:JSON = { + 'mni': Configuration.get('resource.lcp.mni'), + 'mbs': Configuration.get('resource.lcp.mbs'), + } + if self.lon is not None: # add container's resourcename if provided + _cnt['rn'] = self.lon + + container = Factory.resourceFromDict(_cnt, + pi = parentResource.ri, + ty = ResourceTypes.CNT) + try: + container = CSE.dispatcher.createLocalResource(container, parentResource, originator) + except Exception as e: + L.isWarn and L.logWarn(f'Could not create container for LCP: {e}') + raise BAD_REQUEST(f'Could not create container for LCP. Resource name: {self.lon} already exists?') + # set internal attributes afterwards (after validation) + container.setLCPLink(self.ri) + + # Set backlink to container in LCP + self.setAttribute('loi', container.ri) + + + # Register the LCP for periodic positioning procedure + CSE.location.addLocationPolicy(self) + + + + # If the value of locationUpdatePeriod attribute is updated to 0 or NULL, + # the Hosting CSE shall stop periodical positioning procedure and perform the procedure when + # Originator retrieves the resource of the linked resource. See clause 10.2.9.6 and clause 10.2.9.7 for more detail. + + # TODO add event for latest + location retrieval + + # If the value of locationUpdatePeriod attribute is updated to bigger than 0 (e.g. 1 hour) from 0 or NULL, + # the Hosting CSE shall start periodical positioning procedure. + + + def updated(self, dct: JSON | None = None, originator: str | None = None) -> None: + super().updated(dct, originator) + + # update the location policy handling + CSE.location.updateLocationPolicy(self) + + + def deactivate(self, originator:str) -> None: + # Delete the extra resource + if self.loi is not None: + CSE.dispatcher.deleteResource(self.loi, originator) + CSE.location.removeLocationPolicy(self) + super().deactivate(originator) + + + def validate(self, originator: str | None = None, dct: JSON | None = None, parentResource: Resource | None = None) -> None: + + def validateNetworkBasedAttributes() -> None: + """ Validate the Network_based attributes. 
""" + + if self.getFinalResourceAttribute('lot', dct) is not None: # locationTargetID + raise BAD_REQUEST(f'Attribute lot is only allowed if los is Network_based.') + if self.getFinalResourceAttribute('aid', dct) is not None: # authID + raise BAD_REQUEST(f'Attribute aid is only allowed if los is Network_based.') + if self.getFinalResourceAttribute('lor', dct) is not None: # locationServer + raise BAD_REQUEST(f'Attribute aid is only allowed if los is Network_based.') + if self.getFinalResourceAttribute('rlkl', dct) is not None: # retrieveLastKnownLocation + raise BAD_REQUEST(f'Attribute rlkl is only allowed if los is Network_based.') + if self.getFinalResourceAttribute('luec', dct) is not None: # loocationUpdateEventCriteria + raise BAD_REQUEST(f'Attribute luec is only allowed if los is Network_based.') + + super().validate(originator, dct, parentResource) + + # Error for unsupported location source types + los = self.getFinalResourceAttribute('los', dct) # locationSource + if los in [ LocationSource.Network_based, LocationSource.Sharing_based]: + raise NOT_IMPLEMENTED(L.logWarn(f'Unsupported LocationSource: {LocationSource(self.los)}')) + + + # Check the various locationSource types + match los: + case LocationSource.Network_based | LocationSource.Sharing_based: + raise NOT_IMPLEMENTED(L.logWarn(f'Unsupported LocationSource: {LocationSource(los)}')) + case LocationSource.Device_based: + validateNetworkBasedAttributes() + + # Always set the lost to an empty string as long as the locationSource is not Network_based + self.setAttribute('lost', '') + + # Validate the polygon + if (gta := self.gta) is not None: + if (_gta := getGeoPolygon(gta)) is None: + raise BAD_REQUEST('Invalid geographicalTargetArea. Must be a valid geoJSON polygon.') + self.setAttribute(self._gta, _gta) # store the geoJSON polygon in the internal attribute + + + + # TODO store lou to _lou + + + + # TODO more warnings for unsupported attributes (mainly for geo server) + + +# TODo geographicalTargetArea : What if not closed? +# TODO geofenceEventCriteria should be a list of GeofenceEventCriteria +# TODO retrieveLastKnownLocation: Indicates if the Hosting CSE shall retrieve the last known location when the Hosting CSE fails to retrieve the latest location WTF`????? +# TODO: locationUpdateEventCriteria Not supported + + + +#Procedure for resource that stores location information + +# After the resource that stores the location information is created, each instance of location information shall be stored +# in the different resources. In order to store the location information in the resource, +# the Hosting CSE firstly checks the defined locationUpdatePeriod attribute. +# If a valid period value is set for this attribute, the Hosting CSE shall perform the positioning procedures as defined by locationUpdatePeriod +# in the associated resource and stores the results (e.g. position fix and uncertainty) in the resource +# under the created resource. However, if no value (e.g. null or zero) is set and locationUpdateEventCriteria is absent, +# the positioning procedure shall be performed when an Originator requests to retrieve the resource of the +# resource and the result shall be stored as a resource under the resource. \ No newline at end of file diff --git a/acme/resources/LCPAnnc.py b/acme/resources/LCPAnnc.py new file mode 100644 index 00000000..6a4a5a35 --- /dev/null +++ b/acme/resources/LCPAnnc.py @@ -0,0 +1,66 @@ +# +# LCPAnnc.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. 
See the LICENSE file for further details. +# +# ResourceType: LocationPolicy Announced +# + +""" LocationPolicy Announced(LCPA) resource type. """ + +from __future__ import annotations +from typing import Optional + +from ..etc.Types import AttributePolicyDict, ResourceTypes, JSON +from .AnnouncedResource import AnnouncedResource + + +class LCPAnnc(AnnouncedResource): + """ LocationPolicy Announced (LCPA) resource type. """ + + # Specify the allowed child-resource types + _allowedChildResourceTypes:list[ResourceTypes] = [ ] + """ The allowed child-resource types. """ + + # Attributes and Attribute policies for this Resource Class + # Assigned during startup in the Importer + _attributes:AttributePolicyDict = { + # Common and universal attributes + 'rn': None, + 'ty': None, + 'ri': None, + 'pi': None, + 'ct': None, + 'lt': None, + 'et': None, + 'lbl': None, + 'acpi':None, + 'daci': None, + 'lnk': None, + 'ast': None, + + # Resource attributes + 'los': None, + 'lit': None, + 'lou': None, + 'lot': None, + 'lor': None, + 'loi': None, + 'lon': None, + 'lost': None, + 'gta': None, + 'gec': None, + 'aid': None, + 'rlkl': None, + 'luec': None + + } + """ Attributes and `AttributePolicy` for this resource type. """ + + + def __init__(self, dct:Optional[JSON] = None, + pi:Optional[str] = None, + create:Optional[bool] = False) -> None: + super().__init__(ResourceTypes.LCPAnnc, dct, pi = pi, create = create) + diff --git a/acme/resources/NOD.py b/acme/resources/NOD.py index 8c0d889e..afbbd7c7 100644 --- a/acme/resources/NOD.py +++ b/acme/resources/NOD.py @@ -25,6 +25,7 @@ class NOD(AnnounceableResource): # Specify the allowed child-resource types _allowedChildResourceTypes = [ ResourceTypes.ACTR, ResourceTypes.MGMTOBJ, + ResourceTypes.SCH, ResourceTypes.SMD, ResourceTypes.SUB ] diff --git a/acme/resources/NODAnnc.py b/acme/resources/NODAnnc.py index c8e30c6a..d8edbcb3 100644 --- a/acme/resources/NODAnnc.py +++ b/acme/resources/NODAnnc.py @@ -1,10 +1,10 @@ # -# GRPAnnc.py +# NODAnnc.py # # (c) 2020 by Andreas Kraft # License: BSD 3-Clause License. See the LICENSE file for further details. # -# GRP : Announceable variant +# NODAnnc : Announceable variant # from __future__ import annotations @@ -20,6 +20,7 @@ class NODAnnc(AnnouncedResource): _allowedChildResourceTypes = [ ResourceTypes.ACTR, ResourceTypes.ACTRAnnc, ResourceTypes.MGMTOBJAnnc, + ResourceTypes.SCHAnnc, ResourceTypes.SUB ] # Attributes and Attribute policies for this Resource Class @@ -37,7 +38,6 @@ class NODAnnc(AnnouncedResource): 'acpi':None, 'daci': None, 'ast': None, - 'loc': None, 'lnk': None, # Resource attributes diff --git a/acme/resources/RBO.py b/acme/resources/RBO.py index e0248666..1ad3a479 100644 --- a/acme/resources/RBO.py +++ b/acme/resources/RBO.py @@ -6,6 +6,7 @@ # # ResourceType: mgmtObj:Reboot # +""" MgmtObj:Reboot (RBO) resource type.""" from __future__ import annotations from typing import Optional @@ -17,6 +18,7 @@ from ..helpers.TextTools import findXPath class RBO(MgmtObj): + """ MgmtObj:Reboot (RBO) resource type. 
""" # Attributes and Attribute policies for this Resource Class # Assigned during startup in the Importer @@ -49,6 +51,7 @@ class RBO(MgmtObj): 'rbo': None, 'far': None } + """ The allowed attributes and their policy for this resource type.""" def __init__(self, dct:Optional[JSON] = None, diff --git a/acme/resources/RBOAnnc.py b/acme/resources/RBOAnnc.py index 379d76db..20a6a659 100644 --- a/acme/resources/RBOAnnc.py +++ b/acme/resources/RBOAnnc.py @@ -6,6 +6,7 @@ # # RBO : Announceable variant # +""" MgmtObj:Reboot announced (RBOA) resource type. """ from __future__ import annotations from typing import Optional @@ -15,6 +16,7 @@ class RBOAnnc(MgmtObjAnnc): + """ MgmtObj:Reboot announced (RBOA) resource type. """ # Attributes and Attribute policies for this Resource Class # Assigned during startup in the Importer @@ -45,6 +47,7 @@ class RBOAnnc(MgmtObjAnnc): 'rbo': None, 'far': None } + """ The allowed attributes and their policy for this resource type.""" def __init__(self, dct:Optional[JSON] = None, diff --git a/acme/resources/REQ.py b/acme/resources/REQ.py index db9b304e..6107d26e 100644 --- a/acme/resources/REQ.py +++ b/acme/resources/REQ.py @@ -11,8 +11,8 @@ from __future__ import annotations from typing import Optional, Dict, Any -from ..etc.Types import AttributePolicyDict, ResourceTypes, Result, RequestStatus, CSERequest, JSON -from ..etc.ResponseStatusCodes import BAD_REQUEST +from ..etc.Types import AttributePolicyDict, ResourceTypes, RequestStatus, CSERequest, JSON +from ..etc.ResponseStatusCodes import ResponseStatusCode from ..helpers.TextTools import setXPath from ..etc.DateUtils import getResourceDate from ..services.Configuration import Configuration @@ -75,7 +75,7 @@ def createRequestResource(request:CSERequest) -> Resource: The created REQ resource. 
""" - # Check if a an expiration ts has been set in the request + # Check if a request expiration ts has been set in the request if request.rqet: et = request.rqet # This is already an ISO8601 timestamp @@ -83,47 +83,44 @@ def createRequestResource(request:CSERequest) -> Resource: elif request._rpts: et = request._rpts - # otherwise calculate request et + # otherwise get the request's et from the configuration else: et = getResourceDate(offset = Configuration.get('resource.req.et')) - # minEt = getResourceDate(Configuration.get('resource.req.minet')) - # maxEt = getResourceDate(Configuration.get('resource.req.maxet')) - # if request.args.rpts: - # et = request.args.rpts if request.args.rpts < maxEt else maxEt - # else: - # et = minEt + # Build the REQ resource from the original request dct:Dict[str, Any] = { 'm2m:req' : { - 'et' : et, - 'lbl' : [ request.originator ], - 'op' : request.op, - 'tg' : request.id, - 'org' : request.originator, - 'rid' : request.rqi, - 'mi' : { - 'ty' : request.ty, - 'ot' : getResourceDate(), - 'rqet' : request.rqet, - 'rset' : request.rset, - 'rt' : { - 'rtv' : request.rt + 'et': et, + 'lbl': [ request.originator ], + 'op': request.op, + 'tg': request.id, + 'org': request.originator, + 'rid': request.rqi, + 'mi': { + 'ty': request.ty, + 'ot': getResourceDate(), + 'rqet': request.rqet, + 'rset': request.rset, + 'rt': { + 'rtv': request.rt }, - 'rp' : request.rp, - 'rcn' : request.rcn, - 'fc' : { - 'fu' : request.fc.fu, - 'fo' : request.fc.fo, + 'rp': request.rp, + 'rcn': request.rcn, + 'fc': { + 'fu': request.fc.fu, + 'fo': request.fc.fo, }, - 'drt' : request.drt, - 'rvi' : request.rvi if request.rvi else CSE.releaseVersion, - 'vsi' : request.vsi, - 'sqi' : request.sqi, + 'drt': request.drt, + 'rvi': request.rvi if request.rvi else CSE.releaseVersion, + 'vsi': request.vsi, + 'sqi': request.sqi, }, - 'rs' : RequestStatus.PENDING, - # 'ors' : { - # } + 'rs': RequestStatus.PENDING, + 'ors': { + 'rsc': ResponseStatusCode.ACCEPTED, + 'rqi': request.rqi, + } }} # add handlings, conditions and attributes from filter diff --git a/acme/resources/Resource.py b/acme/resources/Resource.py index f49a38f2..41029616 100644 --- a/acme/resources/Resource.py +++ b/acme/resources/Resource.py @@ -11,10 +11,11 @@ # The following import allows to use "Resource" inside a method typing definition from __future__ import annotations from typing import Any, Tuple, cast, Optional, List, overload +import json from copy import deepcopy -from ..etc.Types import ResourceTypes, Result, NotificationEventType, CSERequest, JSON +from ..etc.Types import ResourceTypes, Result, NotificationEventType, CSERequest, JSON, BasicType from ..etc.ResponseStatusCodes import ResponseException, BAD_REQUEST, CONTENTS_UNACCEPTABLE, INTERNAL_SERVER_ERROR from ..etc.Utils import isValidID, uniqueRI, uniqueRN, isUniqueRI, removeNoneValuesFromDict, resourceDiff, normalizeURL, pureResource from ..helpers.TextTools import findXPath, setXPath @@ -36,6 +37,7 @@ _createdInternallyRI = Constants.attrCreatedInternallyRI _imported = Constants.attrImported _isInstantiated = Constants.attrIsInstantiated +_locCoordinate = Constants.attrLocCoordinage _originator = Constants.attrOriginator _modified = Constants.attrModified _remoteID = Constants.attrRemoteID @@ -64,7 +66,8 @@ class Resource(object): # ATTN: There is a similar definition in FCNT, TSB, and others! 
Don't Forget to add attributes there as well internalAttributes = [ _rtype, _srn, _node, _createdInternallyRI, _imported, - _isInstantiated, _originator, _modified, _remoteID, _rvi] + _isInstantiated, _locCoordinate, + _originator, _modified, _remoteID, _rvi ] """ List of internal attributes and which do not belong to the oneM2M resource attributes """ def __init__(self, @@ -516,6 +519,21 @@ def validate(self, originator:Optional[str] = None, if not (et := parentResource.et): et = getResourceDate(CSE.request.maxExpirationDelta) self.setAttribute('et', et) + + # check loc validity: geo type and number of coordinates + if (loc := self.getFinalResourceAttribute('loc', dct)) is not None: + + # The following line is a hack that is necessary because the name "location" is used with different meanings + # and types in different resources (MgmtObj-DVI and normal resources). This is a quick fix for the moment. + # It only checks whether this is a DVI resource. If yes, then the loc attribute is not checked. + if CSE.validator.getAttributePolicy(self.ty if self.mgd is None else self.mgd, 'loc').type != BasicType.string: + # crd should already have been checked as valid JSON before + # Let's optimize and store the coordinates as a JSON object + crd = CSE.validator.validateGeoLocation(loc) + if dct is not None: + setXPath(dct, f'{self.tpe}/{_locCoordinate}', crd, overwrite = True) + else: + self.setLocationCoordinates(crd) ######################################################################### @@ -699,7 +717,7 @@ def getFinalResourceAttribute(self, key:str, dct:Optional[JSON]) -> Any: dct: The dictionary with updated attributes. Return: - The either updated attribute, or old value if the attribute is not updated. The methon returns *None* if the attribute does not exists. + Either the updated attribute, or the old value if the attribute was not updated. The method returns *None* if the attribute does not exist. """ value = self.attribute(key) # old value if dct is not None: @@ -1066,4 +1084,22 @@ def setRVI(self, rvi:str) -> None: Args: rvi: Original CREATE request's *rvi*. """ - self.setAttribute(_rvi, rvi) \ No newline at end of file + self.setAttribute(_rvi, rvi) + + + def getLocationCoordinates(self) -> list: + """ Retrieve a resource's location coordinates (internal attribute). + + Return: + The resource's location coordinates. Might be None. + """ + return self.attribute(_locCoordinate) + + + def setLocationCoordinates(self, crd:JSON) -> None: + """ Set a resource's location coordinates (internal attribute). + + Args: + crd: The location coordinates to assign to a resource. + """ + self.setAttribute(_locCoordinate, crd) \ No newline at end of file diff --git a/acme/resources/SCH.py b/acme/resources/SCH.py new file mode 100644 index 00000000..5194ae10 --- /dev/null +++ b/acme/resources/SCH.py @@ -0,0 +1,128 @@ +# +# SCH.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. See the LICENSE file for further details. +# +# ResourceType: Schedule +# + +""" Schedule (SCH) resource type. """ + +from __future__ import annotations +from typing import Optional + +from ..etc.Constants import Constants as C +from ..etc.Types import AttributePolicyDict, ResourceTypes, JSON +from ..services.Logging import Logging as L +from ..services import CSE +from ..resources.Resource import Resource +from ..etc.ResponseStatusCodes import CONTENTS_UNACCEPTABLE, NOT_IMPLEMENTED +from ..resources.AnnounceableResource import AnnounceableResource + + +class SCH(AnnounceableResource): + """ Schedule (SCH) resource type.
""" + + # Specify the allowed child-resource types + _allowedChildResourceTypes:list[ResourceTypes] = [ ResourceTypes.SUB + ] + """ The allowed child-resource types. """ + + # Attributes and Attribute policies for this Resource Class + # Assigned during startup in the Importer + _attributes:AttributePolicyDict = { + # Common and universal attributes + 'rn': None, + 'ty': None, + 'ri': None, + 'pi': None, + 'ct': None, + 'lt': None, + 'lbl': None, + 'acpi':None, + 'et': None, + 'daci': None, + 'cstn': None, + 'at': None, + 'aa': None, + 'ast': None, + + # Resource attributes + 'se': None, + 'nco': None, + } + """ Attributes and `AttributePolicy` for this resource type. """ + + + def __init__(self, dct:Optional[JSON] = None, pi:Optional[str] = None, create:Optional[bool] = False) -> None: + super().__init__(ResourceTypes.SCH, dct, pi, create = create) + + + + def activate(self, parentResource:Resource, originator:str) -> None: + super().activate(parentResource, originator) + + # Check if the parent is not a resource then the "nco" attribute is not set + _nco = self.nco + if parentResource.ty != ResourceTypes.NOD: + if _nco is not None: + raise CONTENTS_UNACCEPTABLE (L.logWarn(f'"nco" must not be set for a SCH resource that is not a child of a resource')) + + + # If nco is set to true, NOT_IMPLEMENTED is returned + if _nco is not None and _nco == True and not C.networkCoordinationSupported: + raise NOT_IMPLEMENTED (L.logWarn(f'Network Coordinated Operation is not supported by this CSE')) + + # Add the schedule to the schedules DB + CSE.storage.upsertSchedule(self) + + # TODO When is supported + # c)The request shall be rejected with the "OPERATION_NOT_ALLOWED" Response Status Code if the target resource + # is a resource that has a campaignEnabled attribute with a value of true. + + + def update(self, dct: JSON = None, originator: str | None = None, doValidateAttributes: bool | None = True) -> None: + + _nco = self.getFinalResourceAttribute('nco', dct) + _parentResource = self.retrieveParentResource() + + # Check if the parent is not a resource then the "nco" attribute is not set + if _parentResource.ty != ResourceTypes.NOD: + if _nco is not None: + raise CONTENTS_UNACCEPTABLE (L.logWarn(f'"nco" must not be set for a SCH resource that is not a child of a resource')) + + # If nco is set to true, NOT_IMPLEMENTED is returned + if _nco is not None and _nco == True and not C.networkCoordinationSupported: + raise NOT_IMPLEMENTED (L.logWarn(f'Network Coordinated Operation is not supported by this CSE')) + + # TODO When is supported + # c)The request shall be rejected with the "OPERATION_NOT_ALLOWED" Response Status Code + # if thetarget resource is a resource that has a campaignEnabled attribute with a value of true. 
+ + super().update(dct, originator, doValidateAttributes) + + # Update the schedule in the schedules DB + CSE.storage.upsertSchedule(self) + + + def validate(self, originator: str | None = None, dct: JSON | None = None, parentResource: Resource | None = None) -> None: + super().validate(originator, dct, parentResource) + + # Set the active schedule in the CSE when updated + if parentResource.ty == ResourceTypes.CSEBase: + CSE.cseActiveSchedule = self.getFinalResourceAttribute('se/sce', dct) + L.isDebug and L.logDebug(f'Setting active schedule in CSE to {CSE.cseActiveSchedule}') + + + def deactivate(self, originator: str) -> None: + + # TODO When is supported + # a) The request shall be rejected with the "OPERATION_NOT_ALLOWED" Response Status Code + # if the target resource is a resource that has a campaignEnabled attribute with a value of true. + + super().deactivate(originator) + + # Remove the schedule from the schedules DB + CSE.storage.removeSchedule(self) + diff --git a/acme/resources/SCHAnnc.py b/acme/resources/SCHAnnc.py new file mode 100644 index 00000000..e1252259 --- /dev/null +++ b/acme/resources/SCHAnnc.py @@ -0,0 +1,57 @@ +# +# SCHAnnc.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. See the LICENSE file for further details. +# +# ResourceType: Schedule Announced +# + +""" Schedule Announced(SCHA) resource type. """ + +from __future__ import annotations +from typing import Optional + +from ..etc.Types import AttributePolicyDict, ResourceTypes, JSON +from ..services.Logging import Logging as L +from .AnnouncedResource import AnnouncedResource + + +class SCHAnnc(AnnouncedResource): + """ Schedule Announced (SCHA) resource type. """ + + # Specify the allowed child-resource types + _allowedChildResourceTypes:list[ResourceTypes] = [ ] + """ The allowed child-resource types. """ + + # Attributes and Attribute policies for this Resource Class + # Assigned during startup in the Importer + _attributes:AttributePolicyDict = { + # Common and universal attributes + 'rn': None, + 'ty': None, + 'ri': None, + 'pi': None, + 'ct': None, + 'lt': None, + 'et': None, + 'lbl': None, + 'acpi':None, + 'daci': None, + 'lnk': None, + 'ast': None, + + # Resource attributes + 'se': None, + 'nco': None, + } + """ Attributes and `AttributePolicy` for this resource type. 
""" + + + def __init__(self, dct:Optional[JSON] = None, + pi:Optional[str] = None, + create:Optional[bool] = False) -> None: + super().__init__(ResourceTypes.SCHAnnc, dct, pi = pi, create = create) + + +# TODO coninue \ No newline at end of file diff --git a/acme/resources/SMD.py b/acme/resources/SMD.py index 9c4f0b38..5218c949 100644 --- a/acme/resources/SMD.py +++ b/acme/resources/SMD.py @@ -141,7 +141,7 @@ def validate(self, originator:Optional[str] = None, try: CSE.semantic.validateDescriptor(self) except ResponseException as e: - raise BAD_REQUEST(dbg = e.dbg) + raise BAD_REQUEST(e.dbg) # Perform Semantic validation process and add descriptor if findXPath(dct, 'm2m:smd/dsp') or dct is None: # only on create or when descriptor is present in the UPDATE request diff --git a/acme/resources/SMDAnnc.py b/acme/resources/SMDAnnc.py index a35301f0..740e36f9 100644 --- a/acme/resources/SMDAnnc.py +++ b/acme/resources/SMDAnnc.py @@ -33,7 +33,6 @@ class SMDAnnc(AnnouncedResource): 'acpi':None, 'daci': None, 'ast': None, - 'loc': None, 'lnk': None, # Resource attributes diff --git a/acme/resources/SUB.py b/acme/resources/SUB.py index 49caed50..344f006b 100644 --- a/acme/resources/SUB.py +++ b/acme/resources/SUB.py @@ -29,7 +29,8 @@ class SUB(Resource): # Specify the allowed child-resource types - _allowedChildResourceTypes:list[ResourceTypes] = [ ] + _allowedChildResourceTypes:list[ResourceTypes] = [ ResourceTypes.SCH + ] # Attributes and Attribute policies for this Resource Class # Assigned during startup in the Importer @@ -99,25 +100,28 @@ def activate(self, parentResource:Resource, originator:str) -> None: # Apply the nct only on the first element of net. Do the combination checks later in validate() net = self['enc/net'] if len(net) > 0: - if net[0] in [ NotificationEventType.resourceUpdate, NotificationEventType.resourceDelete, - NotificationEventType.createDirectChild, NotificationEventType.deleteDirectChild, - NotificationEventType.retrieveCNTNoChild ]: - self.setAttribute('nct', NotificationContentType.allAttributes, overwrite = False) - elif net[0] in [ NotificationEventType.triggerReceivedForAE ]: - self.setAttribute('nct', NotificationContentType.triggerPayload, overwrite = False) - elif net[0] in [ NotificationEventType.blockingUpdate ]: - self.setAttribute('nct', NotificationContentType.modifiedAttributes, overwrite = False) - elif net[0] in [ NotificationEventType.reportOnGeneratedMissingDataPoints ]: - self.setAttribute('nct', NotificationContentType.timeSeriesNotification, overwrite = False) - + match net[0]: + case NotificationEventType.resourceUpdate |\ + NotificationEventType.resourceDelete |\ + NotificationEventType.createDirectChild |\ + NotificationEventType.deleteDirectChild |\ + NotificationEventType.retrieveCNTNoChild: + self.setAttribute('nct', NotificationContentType.allAttributes, overwrite = False) + + case NotificationEventType.triggerReceivedForAE: + self.setAttribute('nct', NotificationContentType.triggerPayload, overwrite = False) + + case NotificationEventType.blockingUpdate: + self.setAttribute('nct', NotificationContentType.modifiedAttributes, overwrite = False) + + case NotificationEventType.reportOnGeneratedMissingDataPoints: + self.setAttribute('nct', NotificationContentType.timeSeriesNotification, overwrite = False) + # check whether an observed child resource type is actually allowed by the parent if chty := self['enc/chty']: self._checkAllowedCHTY(parentResource, chty) - # nsi is at least an empty list if nse is present, otherwise it must not be present - 
if self.nse is not None:
-			self.setAttribute('nsi', [], overwrite = False)
-			CSE.notification.validateAndConstructNotificationStatsInfo(self)
+		# "nsi" will be added later during the first stat recording
 		CSE.notification.addSubscription(self, originator)
@@ -266,6 +270,15 @@ def validate(self, originator:Optional[str] = None,
 		self._normalizeURIAttribute('su')
+	def childWillBeAdded(self, childResource: Resource, originator: str) -> None:
+		super().childWillBeAdded(childResource, originator)
+		if childResource.ty == ResourceTypes.SCH:
+			if (rn := childResource._originalDict.get('rn')) is None:
+				childResource.setResourceName('notificationSchedule')
+			elif rn != 'notificationSchedule':
+				raise BAD_REQUEST(L.logDebug('rn of a <schedule> child resource under a <subscription> must be "notificationSchedule"'))
+
+
 	def _checkAllowedCHTY(self, parentResource:Resource, chty:list[ResourceTypes]) -> None:
 		""" Check whether an observed child resource types are actually allowed by the parent.
diff --git a/acme/resources/TS.py b/acme/resources/TS.py
index 71b7b616..aba57599 100644
--- a/acme/resources/TS.py
+++ b/acme/resources/TS.py
@@ -230,46 +230,49 @@ def childWillBeAdded(self, childResource:Resource, originator:str) -> None:
 													'pi': self.ri,
 													'dgt': childResource.dgt})
 		if len(tsis) > 0:	# Error if yes
-			raise CONFLICT(dbg = f'timeSeriesInstance with the same dgt: {childResource.dgt} already exists')
+			raise CONFLICT(f'timeSeriesInstance with the same dgt: {childResource.dgt} already exists')
 	# Handle the addition of new TSI. Basically, get rid of old ones.
 	def childAdded(self, childResource:Resource, originator:str) -> None:
 		L.isDebug and L.logDebug(f'Child resource added: {childResource.ri}')
 		super().childAdded(childResource, originator)
-		if childResource.ty == ResourceTypes.TSI:	# Validate if child is TSI
-
-			# Check for mia handling. This sets the et attribute in the TSI
-			if self.mia is not None:
-				# Take either mia or the maxExpirationDelta, whatever is smaller
-				maxEt = getResourceDate(self.mia
-										if self.mia <= CSE.request.maxExpirationDelta
-										else CSE.request.maxExpirationDelta)
-				# Only replace the childresource's et if it is greater than the calculated maxEt
-				if childResource.et > maxEt:
-					childResource.setAttribute('et', maxEt)
-					childResource.dbUpdate(True)
-
-			self.validate(originator)	# Handle old TSI removals
-
-			# Add to monitoring if this is enabled for this TS (mdd & pei & mdt are not None, and mdd==True)
-			if self.mdd and self.pei is not None and self.mdt is not None:
-				CSE.timeSeries.updateTimeSeries(self, childResource)
-
-		elif childResource.ty == ResourceTypes.SUB:		# start monitoring
-			if childResource['enc/md']:
-				CSE.timeSeries.addSubscription(self, childResource)
+		match childResource.ty:
+			case ResourceTypes.TSI:
+				# Check for mia handling.
This sets the et attribute in the TSI + if self.mia is not None: + # Take either mia or the maxExpirationDelta, whatever is smaller + maxEt = getResourceDate(self.mia + if self.mia <= CSE.request.maxExpirationDelta + else CSE.request.maxExpirationDelta) + # Only replace the childresource's et if it is greater than the calculated maxEt + if childResource.et > maxEt: + childResource.setAttribute('et', maxEt) + childResource.dbUpdate(True) + + self.validate(originator) # Handle old TSI removals + + # Add to monitoring if this is enabled for this TS (mdd & pei & mdt are not None, and mdd==True) + if self.mdd and self.pei is not None and self.mdt is not None: + CSE.timeSeries.updateTimeSeries(self, childResource) + + case ResourceTypes.SUB: + # start monitoring + if childResource['enc/md']: + CSE.timeSeries.addSubscription(self, childResource) # Handle the removal of a TSI. def childRemoved(self, childResource:Resource, originator:str) -> None: L.isDebug and L.logDebug(f'Child resource removed: {childResource.ri}') super().childRemoved(childResource, originator) - if childResource.ty == ResourceTypes.TSI: # Validate if child was TSI - self._validateChildren() - elif childResource.ty == ResourceTypes.SUB: - if childResource['enc/md']: - CSE.timeSeries.removeSubscription(self, childResource) + match childResource.ty: + case ResourceTypes.TSI: + # Validate if removed child was TSI + self._validateChildren() + case ResourceTypes.SUB: + if childResource['enc/md']: + CSE.timeSeries.removeSubscription(self, childResource) # handle eventuel updates of subscriptions @@ -365,6 +368,8 @@ def _validateDataDetect(self, updatedAttributes:Optional[JSON] = None) -> None: # Always set the mdc to the length of mdlt if present if self.mdlt is not None: self.setAttribute('mdc', len(self.mdlt)) + else: + self.setAttribute('mdc', 0) # Save changes self.dbUpdate(True) @@ -389,7 +394,7 @@ def _clearMdlt(self, overwrite:Optional[bool] = True) -> None: def timeSeriesInstances(self) -> list[Resource]: """ Get all timeSeriesInstances of a timeSeries and return a sorted (by ct) list """ - return sorted(CSE.dispatcher.directChildResources(self.ri, ResourceTypes.TSI), key = lambda x: x.ct) # type:ignore[no-any-return] + return sorted(CSE.dispatcher.retrieveDirectChildResources(self.ri, ResourceTypes.TSI), key = lambda x: x.ct) # type:ignore[no-any-return] def addDgtToMdlt(self, dgtToAdd:float) -> None: diff --git a/acme/resources/TSI.py b/acme/resources/TSI.py index e3aeb3f7..bc40f044 100644 --- a/acme/resources/TSI.py +++ b/acme/resources/TSI.py @@ -39,6 +39,7 @@ class TSI(AnnounceableResource): 'cr': None, 'loc': None, + # Resource attributes 'dgt': None, 'con': None, diff --git a/acme/services/AnnouncementManager.py b/acme/services/AnnouncementManager.py index a6cd1f6b..b82691bc 100644 --- a/acme/services/AnnouncementManager.py +++ b/acme/services/AnnouncementManager.py @@ -293,8 +293,8 @@ def checkCSEBaseAnnouncement(cseBase:AnnounceableResource) -> None: # Don't allow instances to be announced without their parents if resource.ty in [ResourceTypes.CIN, ResourceTypes.FCI, ResourceTypes.TSI]: - raise OPERATION_NOT_ALLOWED(L.logDebug('announcing instances without their parents is not allowed')) - + L.logWarn('Announcing instances without their parents is not allowed. Unsuccessful announcement') + return # Whatever the parent resource is, check whether the CSEBase has been announced. 
Announce it if necessay # and set the announced CSEBase as new parent checkCSEBaseAnnouncement(parentResource := getCSE()) @@ -422,17 +422,15 @@ def announceUpdatedResource(self, resource:AnnounceableResource, originator:str) # Update the annoucned remote resources announcedCSIs = [] - remoteRIs = [] for (csi, remoteRI) in resource.getAnnouncedTo(): if csi == originator: # Skip the announced resource at the originator !! continue announcedCSIs.append(csi) # build a list of already announced CSIs - remoteRIs.append(csi) # build a list of remote RIs self.updateResourceOnCSI(resource, csi, remoteRI) # Check for any non-announced csi in at, and possibly announce them for csi in CSIsFromAnnounceTo: - if csi not in announcedCSIs and csi not in remoteRIs: + if csi not in announcedCSIs: self.announceResourceToCSI(resource, csi) diff --git a/acme/services/CSE.py b/acme/services/CSE.py index ab3b7931..3da4fea3 100644 --- a/acme/services/CSE.py +++ b/acme/services/CSE.py @@ -30,6 +30,7 @@ from ..services.GroupManager import GroupManager from ..services.HttpServer import HttpServer from ..services.Importer import Importer +from ..services.LocationManager import LocationManager from ..services.MQTTClient import MQTTClient from ..services.NotificationManager import NotificationManager from ..services.RegistrationManager import RegistrationManager @@ -73,6 +74,9 @@ importer:Importer = None """ Runtime instance of the `Importer`. """ +location:LocationManager = None +""" Runtime instance of the `LocationManager`. """ + mqttClient:MQTTClient = None """ Runtime instance of the `MQTTClient`. """ @@ -169,6 +173,9 @@ cseStatus:CSEStatus = CSEStatus.STOPPED """ The CSE's internal runtime status. """ +cseActiveSchedule:list[str] = [] +""" List of active schedules when the CSE is active and will process requests. """ + _cseResetLock = Lock() # lock for resetting the CSE """ Internal CSE's lock when resetting. """ @@ -186,7 +193,7 @@ def startup(args:argparse.Namespace, **kwargs:Dict[str, Any]) -> bool: Return: False if the CSE couldn't initialized and started. """ - global action, announce, console, dispatcher, event, groupResource, httpServer, importer, mqttClient, notification, registration + global action, announce, console, dispatcher, event, groupResource, httpServer, importer, location, mqttClient, notification, registration global remote, request, script, security, semantic, statistics, storage, textUI, time, timeSeries, validator global aeStatistics global supportedReleaseVersions, cseType, defaultSerialization, cseCsi, cseCsiSlash, cseCsiSlashLess, cseAbsoluteSlash @@ -240,10 +247,11 @@ def startup(args:argparse.Namespace, **kwargs:Dict[str, Any]) -> bool: # init Logging # L.init() + L.queueOff() # No queuing of log messages during startup L.log('Starting CSE') L.log(f'CSE-Type: {cseType.name}') - L.log(Configuration.print()) - L.queueOff() # No queuing of log messages during startup + for l in Configuration.print().split('\n'): + L.log(l) # set the logger for the backgroundWorkers. 
Add an offset to compensate for # this and other redirect functions to determine the correct file / linenumber @@ -271,6 +279,7 @@ def startup(args:argparse.Namespace, **kwargs:Dict[str, Any]) -> bool: remote = RemoteCSEManager() # Initialize the remote CSE manager announce = AnnouncementManager() # Initialize the announcement manager semantic = SemanticManager() # Initialize the semantic manager + location = LocationManager() # Initialize the location manager time = TimeManager() # Initialize the time mamanger script = ScriptManager() # Initialize the script manager action = ActionManager() # Initialize the action manager @@ -358,6 +367,7 @@ def _shutdown() -> None: textUI and textUI.shutdown() console and console.shutdown() time and time.shutdown() + location and location.shutdown() semantic and semantic.shutdown() remote and remote.shutdown() mqttClient and mqttClient.shutdown() diff --git a/acme/services/Configuration.py b/acme/services/Configuration.py index 8d94afc8..667cd604 100644 --- a/acme/services/Configuration.py +++ b/acme/services/Configuration.py @@ -6,6 +6,8 @@ # # Managing CSE configurations # +""" This module implements the configuration of the CSE. It reads the configuration file, performs checks, + and provides access to the configuration values. """ from __future__ import annotations @@ -51,6 +53,7 @@ 'textui': 'https://github.com/ankraft/ACME-oneM2M-CSE/blob/master/docs/Configuration.md#textui', 'webui': 'https://github.com/ankraft/ACME-oneM2M-CSE/blob/master/docs/Configuration.md#webui', } +""" Documentation links for configuration settings. These are used in the console and text UIto show the documentation for a configuration setting. """ # # Deprecated secttions @@ -74,6 +77,7 @@ ('cse.textui', 'textui'), ('cse.scripting', 'scripting') ) +""" Deprecated sections. Mapping from old section name to new section name.""" @@ -82,35 +86,67 @@ class Configuration(object): method init(). Access to configuration valus is done by calling Configuration.get(). """ _configuration: Dict[str, Any] = {} + """ The configuration values as a dictionary. """ _configurationDocs: Dict[str, str] = {} + """ The configuration values documentation as a dictionary. """ _defaultConfigFile:str = None + """ The default configuration file. """ _argsConfigfile:str = None + """ The configuration file passed as argument. This overrides the respective value in the configuration file. """ _argsLoglevel:str = None + """ The log level passed as argument. This overrides the respective value in the configuration file. """ _argsDBReset:bool = None + """ The reset DB flag passed as argument. This overrides the respective value in the configuration file. """ _argsDBStorageMode:str = None + """ The DB storage mode passed as argument. This overrides the respective value in the configuration file. """ _argsHeadless:bool = None + """ The headless flag passed as argument. This overrides the respective value in the configuration file. """ _argsHttpAddress:str = None + """ The http address passed as argument. This overrides the respective value in the configuration file. """ _argsHttpPort:int = None + """ The http port passed as argument. This overrides the respective value in the configuration file. """ _argsImportDirectory:str = None + """ The import directory passed as argument. This overrides the respective value in the configuration file. """ _argsListenIF:str = None + """ The network interface passed as argument. This overrides the respective value in the configuration file. 
""" _argsMqttEnabled:bool = None + """ The mqtt enabled flag passed as argument. This overrides the respective value in the configuration file. """ _argsRemoteCSEEnabled:bool = None + """ The remote CSE enabled flag passed as argument. This overrides the respective value in the configuration file. """ _argsRunAsHttps:bool = None + """ The https flag passed as argument. This overrides the respective value in the configuration file. """ + _argsRunAsHttpWsgi:bool = None + """ The http WSGI flag passed as argument. This overrides the respective value in the configuration file. """ _argsStatisticsEnabled:bool = None + """ The statistics enabled flag passed as argument. This overrides the respective value in the configuration file. """ _argsTextUI:bool = None + """ The text UI flag passed as argument. This overrides the respective value in the configuration file. """ # Internal print function that takes the headless setting into account @staticmethod def _print(msg:str) -> None: + """ Print a message to the console. If the CSE is running in headless mode, then the message is not printed. + + Args: + msg: The message to print. + """ if not Configuration._argsHeadless: Console().print(msg) # Print error message to console @staticmethod - def init(args:argparse.Namespace = None) -> bool: + def init(args:Optional[argparse.Namespace] = None) -> bool: + """ Initialize and read the configuration. This method must be called before accessing any configuration value. + + Args: + args: Optional arguments. If not given, then the command line arguments are used. + + Returns: + True on success, False otherwise. + """ # The default ini file Configuration._defaultConfigFile = f'{pathlib.Path.cwd()}{os.sep}{C.defaultConfigFile}' @@ -128,6 +164,7 @@ def init(args:argparse.Namespace = None) -> bool: Configuration._argsMqttEnabled = args.mqttenabled if args and 'mqttenabled' in args else None Configuration._argsRemoteCSEEnabled = args.remotecseenabled if args and 'remotecseenabled' in args else None Configuration._argsRunAsHttps = args.https if args and 'https' in args else None + Configuration._argsRunAsHttpWsgi = args.httpWsgi if args and 'httpWsgi' in args else None Configuration._argsStatisticsEnabled = args.statisticsenabled if args and 'statisticsenabled' in args else None Configuration._argsTextUI = args.textui if args and 'textui' in args else None @@ -146,10 +183,10 @@ def init(args:argparse.Namespace = None) -> bool: # Read and parse the configuration file - config = configparser.ConfigParser( interpolation=configparser.ExtendedInterpolation(), + config = configparser.ConfigParser( interpolation = configparser.ExtendedInterpolation(), # Convert csv to list, ignore empty elements - converters={'list': lambda x: [i.strip() for i in x.split(',') if i]} + converters = {'list': lambda x: [i.strip() for i in x.split(',') if i]} ) config.read_dict({ 'basic.config': { 'baseDirectory' : pathlib.Path(os.path.abspath(os.path.dirname(__file__))).parent.parent, # points to the acme module's parent directory @@ -302,7 +339,7 @@ def init(args:argparse.Namespace = None) -> bool: 'http.allowPatchForDelete' : config.getboolean('http', 'allowPatchForDelete', fallback = False), 'http.enableStructureEndpoint' : config.getboolean('http', 'enableStructureEndpoint', fallback = False), 'http.enableUpperTesterEndpoint' : config.getboolean('http', 'enableUpperTesterEndpoint', fallback = False), - 'http.listenIF' : config.get('http', 'listenIF', fallback = '127.0.0.1'), + 'http.listenIF' : config.get('http', 'listenIF', fallback = 
'0.0.0.0'), 'http.port' : config.getint('http', 'port', fallback = 8080), 'http.root' : config.get('http', 'root', fallback = ''), 'http.timeout' : config.getfloat('http', 'timeout', fallback = 10.0), @@ -314,6 +351,7 @@ def init(args:argparse.Namespace = None) -> bool: 'http.cors.enable' : config.getboolean('http.cors', 'enable', fallback = False), 'http.cors.resources' : config.getlist('http.cors', 'resources', fallback = [ r'/*' ]), # type: ignore [attr-defined] + # # HTTP Server Security # @@ -323,6 +361,20 @@ def init(args:argparse.Namespace = None) -> bool: 'http.security.tlsVersion' : config.get('http.security', 'tlsVersion', fallback = 'auto'), 'http.security.useTLS' : config.getboolean('http.security', 'useTLS', fallback = False), 'http.security.verifyCertificate' : config.getboolean('http.security', 'verifyCertificate', fallback = False), + 'http.security.enableBasicAuth' : config.getboolean('http.security', 'enableBasicAuth', fallback = False), + 'http.security.enableTokenAuth' : config.getboolean('http.security', 'enableTokenAuth', fallback = False), + 'http.security.basicAuthFile' : config.get('http.security', 'basicAuthFile', fallback = './certs/http_basic_auth.txt'), + 'http.security.tokenAuthFile' : config.get('http.security', 'tokenAuthFile', fallback = './certs/http_token_auth.txt'), + + + # + # HTTP Server WSGI + # + + 'http.wsgi.enable' : config.getboolean('http.wsgi', 'enable', fallback = False), + 'http.wsgi.connectionLimit' : config.getint('http.wsgi', 'connectionLimit', fallback = 100), + 'http.wsgi.threadPoolSize' : config.getint('http.wsgi', 'threadPoolSize', fallback = 100), + # # Logging @@ -334,11 +386,13 @@ def init(args:argparse.Namespace = None) -> bool: 'logging.enableScreenLogging' : config.getboolean('logging', 'enableScreenLogging', fallback = True), 'logging.filter' : config.getlist('logging', 'filter', fallback = []), # type: ignore [attr-defined] 'logging.level' : config.get('logging', 'level', fallback = 'debug'), + 'logging.maxLogMessageLength' : config.getint('logging', 'maxLogMessageLength', fallback = 1000), # Max length of a log message 'logging.path' : config.get('logging', 'path', fallback = './logs'), 'logging.queueSize' : config.getint('logging', 'queueSize', fallback = 5000), # Size of the log queue 'logging.size' : config.getint('logging', 'size', fallback = 100000), 'logging.stackTraceOnError' : config.getboolean('logging', 'stackTraceOnError', fallback = True), + # # MQTT Client # @@ -346,7 +400,7 @@ def init(args:argparse.Namespace = None) -> bool: 'mqtt.address' : config.get('mqtt', 'address', fallback = '127.0.0.1'), 'mqtt.enable' : config.getboolean('mqtt', 'enable', fallback = False), 'mqtt.keepalive' : config.getint('mqtt', 'keepalive', fallback = 60), - 'mqtt.listenIF' : config.get('mqtt', 'listenIF', fallback = '127.0.0.1'), + 'mqtt.listenIF' : config.get('mqtt', 'listenIF', fallback = '0.0.0.0'), 'mqtt.port' : config.getint('mqtt', 'port', fallback = None), # Default will be determined later (s.b.) 
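The option lookups in this table rely on the parser set up at the top of the hunk: ExtendedInterpolation resolves ${section:option} references such as ${basic.config:dataDirectory}, and the custom 'list' converter adds a getlist() accessor for comma-separated values. A minimal standalone sketch of that behaviour, with toy section and option names that are not taken from acme.ini.default, would be:

import configparser

# Parser configured like in Configuration.init(): extended interpolation plus a
# csv-to-list converter that makes getlist() available.
config = configparser.ConfigParser(
	interpolation = configparser.ExtendedInterpolation(),
	converters = {'list': lambda x: [i.strip() for i in x.split(',') if i]})

# Toy configuration, for illustration only (not the real acme.ini).
config.read_string("""
[basic.config]
dataDirectory = /tmp/acme

[logging]
path = ${basic.config:dataDirectory}/logs
filter = werkzeug, markdown_it, asyncio
""")

print(config.get('logging', 'path'))		# -> /tmp/acme/logs  (interpolated)
print(config.getlist('logging', 'filter'))	# -> ['werkzeug', 'markdown_it', 'asyncio']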
'mqtt.timeout' : config.getfloat('mqtt', 'timeout', fallback = 10.0), 'mqtt.topicPrefix' : config.get('mqtt', 'topicPrefix', fallback = ''), @@ -362,6 +416,24 @@ def init(args:argparse.Namespace = None) -> bool: 'mqtt.security.useTLS' : config.getboolean('mqtt.security', 'useTLS', fallback = False), 'mqtt.security.verifyCertificate' : config.getboolean('mqtt.security', 'verifyCertificate', fallback = False), + # + # CoAP Client + # + + 'coap.enable' : config.getboolean('coap', 'enable', fallback = False), + 'coap.listenIF' : config.get('coap', 'listenIF', fallback = '0.0.0.0'), + 'coap.port' : config.getint('coap', 'port', fallback = None), # Default will be determined later (s.b.) + + # + # CoAP Client Security + # + + 'coap.security.certificateFile' : config.get('coap.security', 'certificateFile', fallback = None), + 'coap.security.privateKeyFile' : config.get('coap.security', 'privateKeyFile', fallback = None), + 'coap.security.dtlsVersion' : config.get('coap.security', 'dtlsVersion', fallback = 'auto'), + 'coap.security.useDTLS' : config.getboolean('coap.security', 'useDTLS', fallback = False), + 'coap.security.verifyCertificate' : config.getboolean('coap.security', 'verifyCertificate', fallback = False), + # # Defaults for Access Control Policies @@ -387,6 +459,21 @@ def init(args:argparse.Namespace = None) -> bool: 'resource.cnt.mbs' : config.getint('resource.cnt', 'mbs', fallback = 10000), + # + # Defaults for Group Resources + # + + 'resource.grp.resultExpirationTime' : config.getint('resource.grp', 'resultExpirationTime', fallback = 0), + + + # + # Defaults for LocationPolicy Resources + # + + 'resource.lcp.mni' : config.getint('resource.lcp', 'mni', fallback = 10), + 'resource.lcp.mbs' : config.getint('resource.lcp', 'mbs', fallback = 10000), + + # # Defaults for Request Resources # @@ -425,6 +512,7 @@ def init(args:argparse.Namespace = None) -> bool: 'scripting.fileMonitoringInterval' : config.getfloat('scripting', 'fileMonitoringInterval', fallback = 2.0), 'scripting.scriptDirectories' : config.getlist('scripting', 'scriptDirectories', fallback = []), # type: ignore[attr-defined] 'scripting.verbose' : config.getboolean('scripting', 'verbose', fallback = False), + 'scripting.maxRuntime' : config.getfloat('scripting', 'maxRuntime', fallback = 60.0), # # Text UI @@ -458,86 +546,120 @@ def init(args:argparse.Namespace = None) -> bool: @staticmethod def validate(initial:Optional[bool] = False) -> Tuple[bool, str]: + """ Validates the configuration and returns a tuple (bool, str) with the result and an error message if applicable. + + Args: + initial: True if this is the initial validation during startup, False otherwise. Default: False + + Returns: + A tuple (bool, str) with the result and an error message if applicable. + """ # Some clean-ups and overrides + def _get(key:str) -> Any: + """ Helper function to retrieve a configuration value. If the value is not found, None is returned. + + Args: + key: The configuration key to retrieve. + """ + return Configuration.get(key) + + + def _put(key:str, value:Any) -> None: + """ Helper function to set a configuration value. + + Args: + key: The configuration key to set. 
+ """ + Configuration._configuration[key] = value + + from ..etc.Utils import normalizeURL, isValidCSI # cannot import at the top because of circel import # CSE type - if isinstance(cseType := Configuration._configuration['cse.type'], str): + if isinstance(cseType := _get('cse.type'), str): cseType = cseType.lower() - if cseType == 'asn': - Configuration._configuration['cse.type'] = CSEType.ASN - elif cseType == 'mn': - Configuration._configuration['cse.type'] = CSEType.MN - else: - Configuration._configuration['cse.type'] = CSEType.IN + match cseType: + case 'asn': + _put('cse.type', CSEType.ASN) + case 'mn': + _put('cse.type', CSEType.MN) + case 'in': + _put('cse.type', CSEType.IN) + case _: + return False, f'Configuration Error: Unsupported \[cse]:type: {cseType}' # CSE Serialization - if isinstance(ct := Configuration._configuration['cse.defaultSerialization'], str): - Configuration._configuration['cse.defaultSerialization'] = ContentSerializationType.toContentSerialization(ct) - if Configuration._configuration['cse.defaultSerialization'] == ContentSerializationType.UNKNOWN: + if isinstance(ct := _get('cse.defaultSerialization'), str): + _put('cse.defaultSerialization', ContentSerializationType.toContentSerialization(ct)) + if _get('cse.defaultSerialization') == ContentSerializationType.UNKNOWN: return False, f'Configuration Error: Unsupported \[cse]:defaultSerialization: {ct}' # Registrar Serialization - if isinstance(ct := Configuration._configuration['cse.registrar.serialization'], str): - Configuration._configuration['cse.registrar.serialization'] = ContentSerializationType.toContentSerialization(ct) - if Configuration._configuration['cse.registrar.serialization'] == ContentSerializationType.UNKNOWN: + if isinstance(ct := _get('cse.registrar.serialization'), str): + _put('cse.registrar.serialization', ContentSerializationType.toContentSerialization(ct)) + if _get('cse.registrar.serialization') == ContentSerializationType.UNKNOWN: return False, f'Configuration Error: Unsupported \[cse.registrar]:serialization: {ct}' # Loglevel and various overrides from command line from ..services.Logging import LogLevel - if isinstance(logLevel := Configuration._configuration['logging.level'], str): + if isinstance(logLevel := _get('logging.level'), str): logLevel = logLevel.lower() logLevel = (Configuration._argsLoglevel or logLevel) # command line args override config - if logLevel == 'off': - Configuration._configuration['logging.level'] = LogLevel.OFF - elif logLevel == 'info': - Configuration._configuration['logging.level'] = LogLevel.INFO - elif logLevel == 'warn': - Configuration._configuration['logging.level'] = LogLevel.WARNING - elif logLevel == 'error': - Configuration._configuration['logging.level'] = LogLevel.ERROR - else: - Configuration._configuration['logging.level'] = LogLevel.DEBUG + + match logLevel: + case 'off': + _put('logging.level', LogLevel.OFF) + case 'info': + _put('logging.level', LogLevel.INFO) + case 'warn' | 'warning': + _put('logging.level', LogLevel.WARNING) + case 'error': + _put('logging.level', LogLevel.ERROR) + case 'debug': + _put('logging.level', LogLevel.DEBUG) + case _: + return False, f'Configuration Error: Unsupported \[logging]:level: {logLevel}' + # Test for correct logging queue size if (queueSize := Configuration._configuration['logging.queueSize']) < 0: return False, f'Configuration Error: \[logging]:queueSize must be 0 or greater' # Overwriting some configurations from command line - if Configuration._argsDBReset is True: 
Configuration._configuration['database.resetOnStartup'] = True # Override DB reset from command line - if Configuration._argsDBStorageMode is not None: Configuration._configuration['database.inMemory'] = Configuration._argsDBStorageMode == 'memory' # Override DB storage mode from command line - if Configuration._argsHttpAddress is not None: Configuration._configuration['http.address'] = Configuration._argsHttpAddress # Override server http address - if Configuration._argsHttpPort is not None: Configuration._configuration['http.port'] = Configuration._argsHttpPort # Override server http port - if Configuration._argsImportDirectory is not None: Configuration._configuration['cse.resourcesPath'] = Configuration._argsImportDirectory # Override import directory from command line - if Configuration._argsListenIF is not None: Configuration._configuration['http.listenIF'] = Configuration._argsListenIF # Override binding network interface - if Configuration._argsMqttEnabled is not None: Configuration._configuration['mqtt.enable'] = Configuration._argsMqttEnabled # Override mqtt enable - if Configuration._argsRemoteCSEEnabled is not None: Configuration._configuration['cse.enableRemoteCSE'] = Configuration._argsRemoteCSEEnabled # Override remote CSE enablement - if Configuration._argsRunAsHttps is not None: Configuration._configuration['http.security.useTLS'] = Configuration._argsRunAsHttps # Override useTLS - if Configuration._argsStatisticsEnabled is not None: Configuration._configuration['cse.statistics.enable'] = Configuration._argsStatisticsEnabled # Override statistics enablement - if Configuration._argsTextUI is not None: Configuration._configuration['textui.startWithTUI'] = Configuration._argsTextUI - if Configuration._argsHeadless is True: - Configuration._configuration['console.headless'] = True + if Configuration._argsDBReset is True: _put('database.resetOnStartup', True) # Override DB reset from command line + if Configuration._argsDBStorageMode is not None: _put('database.inMemory', Configuration._argsDBStorageMode == 'memory') # Override DB storage mode from command line + if Configuration._argsHttpAddress is not None: _put('http.address', Configuration._argsHttpAddress) # Override server http address + if Configuration._argsHttpPort is not None: _put('http.port', Configuration._argsHttpPort) # Override server http port + if Configuration._argsImportDirectory is not None: _put('cse.resourcesPath', Configuration._argsImportDirectory) # Override import directory from command line + if Configuration._argsListenIF is not None: _put('http.listenIF', Configuration._argsListenIF) # Override binding network interface + if Configuration._argsMqttEnabled is not None: _put('mqtt.enable', Configuration._argsMqttEnabled) # Override mqtt enable + if Configuration._argsRemoteCSEEnabled is not None: _put('cse.enableRemoteCSE', Configuration._argsRemoteCSEEnabled) # Override remote CSE enablement + if Configuration._argsRunAsHttps is not None: _put('http.security.useTLS', Configuration._argsRunAsHttps) # Override useTLS + if Configuration._argsRunAsHttpWsgi is not None: _put('http.wsgi.enable', Configuration._argsRunAsHttpWsgi) # Override use WSGI + if Configuration._argsStatisticsEnabled is not None: _put('cse.statistics.enable', Configuration._argsStatisticsEnabled) # Override statistics enablement + if Configuration._argsTextUI is not None: _put('textui.startWithTUI', Configuration._argsTextUI) + if Configuration._argsHeadless is True: _put('console.headless', True) # Correct urls - 
Configuration._configuration['cse.registrar.address'] = normalizeURL(Configuration._configuration['cse.registrar.address']) - Configuration._configuration['http.address'] = normalizeURL(Configuration._configuration['http.address']) - Configuration._configuration['http.root'] = normalizeURL(Configuration._configuration['http.root']) - Configuration._configuration['cse.registrar.root'] = normalizeURL(Configuration._configuration['cse.registrar.root']) + _put('cse.registrar.address', normalizeURL(Configuration._configuration['cse.registrar.address'])) + _put('http.address', normalizeURL(Configuration._configuration['http.address'])) + _put('http.root', normalizeURL(Configuration._configuration['http.root'])) + _put('cse.registrar.root', normalizeURL(Configuration._configuration['cse.registrar.root'])) # Just in case: check the URL's - if Configuration._configuration['http.security.useTLS']: - if Configuration._configuration['http.address'].startswith('http:'): + if _get('http.security.useTLS'): + if _get('http.address').startswith('http:'): Configuration._print('[orange3]Configuration Warning: Changing "http" to "https" in [i]\[http]:address[/i]') - Configuration._configuration['http.address'] = Configuration._configuration['http.address'].replace('http:', 'https:') + _put('http.address', _get('http.address').replace('http:', 'https:')) # registrar might still be accessible vi another protocol # if Configuration._configuration['cse.registrar.address'].startswith('http:'): # _print('[orange3]Configuration Warning: Changing "http" to "https" in \[cse.registrar]:address') # Configuration._configuration['cse.registrar.address'] = Configuration._configuration['cse.registrar.address'].replace('http:', 'https:') else: - if Configuration._configuration['http.address'].startswith('https:'): + if _get('http.address').startswith('https:'): Configuration._print('[orange3]Configuration Warning: Changing "https" to "http" in [i]\[http]:address[/i]') - Configuration._configuration['http.address'] = Configuration._configuration['http.address'].replace('https:', 'http:') + _put('http.address', _get('http.address').replace('https:', 'http:')) # registrar might still be accessible vi another protocol # if Configuration._configuration['cse.registrar.address'].startswith('https:'): # _print('[orange3]Configuration Warning: Changing "https" to "http" in \[cse.registrar]:address') @@ -545,11 +667,11 @@ def validate(initial:Optional[bool] = False) -> Tuple[bool, str]: # Operation - if Configuration._configuration['cse.operation.jobs.balanceTarget'] <= 0.0: + if _get('cse.operation.jobs.balanceTarget') <= 0.0: return False, f'Configuration Error: [i]\[cse.operation.jobs]:balanceTarget[/i] must be > 0.0' - if Configuration._configuration['cse.operation.jobs.balanceLatency'] < 0: + if _get('cse.operation.jobs.balanceLatency') < 0: return False, f'Configuration Error: [i]\[cse.operation.jobs]:balanceLatency[/i] must be >= 0' - if Configuration._configuration['cse.operation.jobs.balanceReduceFactor'] < 1.0: + if _get('cse.operation.jobs.balanceReduceFactor') < 1.0: return False, f'Configuration Error: [i]\[cse.operation.jobs]:balanceReduceFactor[/i] must be >= 1.0' @@ -557,65 +679,102 @@ def validate(initial:Optional[bool] = False) -> Tuple[bool, str]: # Some sanity and validity checks # - # TLS & certificates - if not Configuration._configuration['http.security.useTLS']: # clear certificates configuration if not in use - Configuration._configuration['http.security.verifyCertificate'] = False - 
Configuration._configuration['http.security.tlsVersion'] = 'auto' - Configuration._configuration['http.security.caCertificateFile'] = '' - Configuration._configuration['http.security.caPrivateKeyFile'] = '' + # HTTP TLS & certificates + if not _get('http.security.useTLS'): # clear certificates configuration if not in use + _put('http.security.verifyCertificate', False) + _put('http.security.tlsVersion', 'auto') + _put('http.security.caCertificateFile', '') + _put('http.security.caPrivateKeyFile', '') else: - if not (val := Configuration._configuration['http.security.tlsVersion']).lower() in [ 'tls1.1', 'tls1.2', 'auto' ]: + if not (val := _get('http.security.tlsVersion')).lower() in [ 'tls1.1', 'tls1.2', 'auto' ]: return False, f'Configuration Error: Unknown value for [i]\[http.security]:tlsVersion[/i]: {val}' - if not (val := Configuration._configuration['http.security.caCertificateFile']): + if not (val := _get('http.security.caCertificateFile')): return False, 'Configuration Error: [i]\[http.security]:caCertificateFile[/i] must be set when TLS is enabled' if not os.path.exists(val): return False, f'Configuration Error: [i]\[http.security]:caCertificateFile[/i] does not exists or is not accessible: {val}' - if not (val := Configuration._configuration['http.security.caPrivateKeyFile']): + if not (val := _get('http.security.caPrivateKeyFile')): return False, 'Configuration Error: [i]\[http.security]:caPrivateKeyFile[/i] must be set when TLS is enabled' if not os.path.exists(val): return False, f'Configuration Error: [i]\[http.security]:caPrivateKeyFile[/i] does not exists or is not accessible: {val}' + # HTTP CORS - if initial and Configuration._configuration['http.cors.enable'] and not Configuration._configuration['http.security.useTLS']: + if initial and _get('http.cors.enable') and not _get('http.security.useTLS'): Configuration._print('[orange3]Configuration Warning: [i]\[http.security].useTLS[/i] (https) should be enabled when [i]\[http.cors].enable[/i] is enabled.') + + # HTTP authentication + if _get('http.security.enableBasicAuth') and not _get('http.security.basicAuthFile'): + return False, 'Configuration Error: [i]\[http.security]:httpBasicAuthFile[/i] must be set when HTTP Basic Auth is enabled' + if _get('http.security.enableTokenAuth') and not _get('http.security.tokenAuthFile'): + return False, 'Configuration Error: [i]\[http.security]:httpTokenAuthFile[/i] must be set when HTTP Token Auth is enabled' + + + # HTTP WSGI + if _get('http.wsgi.enable') and _get('http.security.useTLS'): + # WSGI and TLS cannot both be enabled + return False, 'Configuration Error: [i]\[http.security].useTLS[/i] (https) cannot be enabled when [i]\[http.wsgi].enable[/i] is enabled (WSGI and TLS cannot both be enabled).' 
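The refactored validate() funnels every read and write through the local _get()/_put() helpers and reports problems as a (success, message) tuple; the remaining WSGI checks continue directly below. A rough, self-contained sketch of that pattern against a plain dictionary (the key 'http.wsgi.validated' is invented for illustration and is not a real setting) might look like this:

from typing import Any, Dict, Optional, Tuple

def validateWsgiSettings(cfg:Dict[str, Any]) -> Tuple[bool, Optional[str]]:
	# Local accessors in the spirit of _get()/_put() above; here they just wrap a plain dict.
	def _get(key:str) -> Any:
		return cfg.get(key)
	def _put(key:str, value:Any) -> None:
		cfg[key] = value

	if _get('http.wsgi.enable') and _get('http.security.useTLS'):
		return False, 'WSGI and TLS cannot both be enabled'
	if _get('http.wsgi.threadPoolSize') < 1:
		return False, 'threadPoolSize must be > 0'
	if _get('http.wsgi.connectionLimit') < 1:
		return False, 'connectionLimit must be > 0'
	_put('http.wsgi.validated', True)	# illustrative write-back only, not a real setting
	return True, None

# A configuration that combines WSGI with TLS is rejected:
ok, msg = validateWsgiSettings({'http.wsgi.enable': True,
								'http.security.useTLS': True,
								'http.wsgi.threadPoolSize': 100,
								'http.wsgi.connectionLimit': 100})
assert not ok and msg == 'WSGI and TLS cannot both be enabled'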
+		if _get('http.wsgi.threadPoolSize') < 1:
+			return False, 'Configuration Error: [i]\[http.wsgi]:threadPoolSize[/i] must be > 0'
+		if _get('http.wsgi.connectionLimit') < 1:
+			return False, 'Configuration Error: [i]\[http.wsgi]:connectionLimit[/i] must be > 0'
+
 		#
 		#	MQTT client
 		#
-		if not Configuration._configuration['mqtt.port']:	# set the default port depending on whether to use TLS
-			Configuration._configuration['mqtt.port'] = 8883 if Configuration._configuration['mqtt.security.useTLS'] else 1883
-		if not (Configuration._configuration['mqtt.security.username']) != (not Configuration._configuration['mqtt.security.password']):
+		if not _get('mqtt.port'):	# set the default port depending on whether to use TLS
+			_put('mqtt.port', 8883 if _get('mqtt.security.useTLS') else 1883)
+		if not _get('mqtt.security.username') != (not _get('mqtt.security.password')):	# Hack: != -> either both are empty, or both are set
 			return False, f'Configuration Error: Username or password missing for [i]\[mqtt.security][/i]'
 		# remove empty cid from the list
-		Configuration._configuration['mqtt.security.allowedCredentialIDs'] = [ cid for cid in Configuration._configuration['mqtt.security.allowedCredentialIDs'] if len(cid) ]
+		_put('mqtt.security.allowedCredentialIDs', [ cid for cid in _get('mqtt.security.allowedCredentialIDs') if len(cid) ])
+
+		# COAP TLS & certificates
+		if not _get('coap.security.useDTLS'):	# clear certificates configuration if not in use
+			_put('coap.security.verifyCertificate', False)
+			_put('coap.security.dtlsVersion', 'auto')
+			_put('coap.security.certificateFile', '')
+			_put('coap.security.privateKeyFile', '')
+		else:
+			if not (val := _get('coap.security.dtlsVersion')).lower() in [ 'tls1.1', 'tls1.2', 'auto' ]:
+				return False, f'Configuration Error: Unknown value for [i]\[coap.security]:dtlsVersion[/i]: {val}'
+			if not (val := _get('coap.security.certificateFile')):
+				return False, 'Configuration Error: [i]\[coap.security]:certificateFile[/i] must be set when DTLS is enabled'
+			if not os.path.exists(val):
+				return False, f'Configuration Error: [i]\[coap.security]:certificateFile[/i] does not exist or is not accessible: {val}'
+			if not (val := _get('coap.security.privateKeyFile')):
+				return False, 'Configuration Error: [i]\[coap.security]:privateKeyFile[/i] must be set when DTLS is enabled'
+			if not os.path.exists(val):
+				return False, f'Configuration Error: [i]\[coap.security]:privateKeyFile[/i] does not exist or is not accessible: {val}'
+
+
+
 		# check the csi format and value
-		if not isValidCSI(val:=Configuration._configuration['cse.cseID']):
+		if not isValidCSI(val := _get('cse.cseID')):
 			return False, f'Configuration Error: Wrong format for [i]\[cse]:cseID[/i]: {val}'
-		if Configuration._configuration['cse.cseID'][1:] == Configuration._configuration['cse.resourceName']:
+		if _get('cse.cseID')[1:] == _get('cse.resourceName'):
 			return False, f'Configuration Error: [i]\[cse]:cseID[/i] must be different from [i]\[cse]:resourceName[/i]'
-		if Configuration._configuration['cse.registrar.address'] and Configuration._configuration['cse.registrar.cseID']:
-			if not isValidCSI(val:=Configuration._configuration['cse.registrar.cseID']):
+		if _get('cse.registrar.address') and _get('cse.registrar.cseID'):
+			if not isValidCSI(val := _get('cse.registrar.cseID')):
 				return False, f'Configuration Error: Wrong format for [i]\[cse.registrar]:cseID[/i]: {val}'
-		if len(Configuration._configuration['cse.registrar.cseID']) > 0 and len(Configuration._configuration['cse.registrar.resourceName']) == 0:
+		if 
len(_get('cse.registrar.cseID')) > 0 and len(_get('cse.registrar.resourceName')) == 0: return False, 'Configuration Error: Missing configuration [i]\[cse.registrar]:resourceName[/i]' # Check default subscription duration - if Configuration._configuration['resource.sub.batchNotifyDuration'] < 1: + if _get('resource.sub.batchNotifyDuration') < 1: return False, 'Configuration Error: [i]\[resource.sub]:batchNotifyDuration[/i] must be > 0' # Check flexBlocking value - Configuration._configuration['cse.flexBlockingPreference'] = Configuration._configuration['cse.flexBlockingPreference'].lower() - if Configuration._configuration['cse.flexBlockingPreference'] not in ['blocking', 'nonblocking']: + _put('cse.flexBlockingPreference', _get('cse.flexBlockingPreference').lower()) + if _get('cse.flexBlockingPreference') not in ['blocking', 'nonblocking']: return False, 'Configuration Error: [i]\[cse]:flexBlockingPreference[/i] must be "blocking" or "nonblocking"' # Check release versions - if len(srv := Configuration._configuration['cse.supportedReleaseVersions']) == 0: + if len(srv := _get('cse.supportedReleaseVersions')) == 0: return False, 'Configuration Error: [i]\[cse]:supportedReleaseVersions[/i] must not be empty' - if len(rvi := Configuration._configuration['cse.releaseVersion']) == 0: + if len(rvi := _get('cse.releaseVersion')) == 0: return False, 'Configuration Error: [i]\[cse]:releaseVersion[/i] must not be empty' if rvi not in srv: return False, f'Configuration Error: [i]\[cse]:releaseVersion[/i]: {rvi} not in [i]\[cse].supportedReleaseVersions[/i]: {srv}' @@ -623,33 +782,35 @@ def validate(initial:Optional[bool] = False) -> Tuple[bool, str]: # return False, f'Configuration Error: \[cse]:releaseVersion: {rvi} less than highest value in \[cse].supportedReleaseVersions: {srv}. Either increase the [i]releaseVersion[/i] or reduce the set of [i]supportedReleaseVersions[/i].' 
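As a worked example of the release-version rule enforced above, a small standalone check with illustrative values (not defaults taken from this patch) behaves as follows:

from typing import List, Optional, Tuple

def checkReleaseVersions(srv:List[str], rvi:str) -> Tuple[bool, Optional[str]]:
	# Mirrors the rule above: both settings must be non-empty and the announced
	# releaseVersion must be one of the supportedReleaseVersions.
	if len(srv) == 0:
		return False, 'supportedReleaseVersions must not be empty'
	if len(rvi) == 0:
		return False, 'releaseVersion must not be empty'
	if rvi not in srv:
		return False, f'releaseVersion {rvi} not in supportedReleaseVersions {srv}'
	return True, None

print(checkReleaseVersions(['2a', '3', '4'], '4'))	# (True, None)
print(checkReleaseVersions(['2a', '3'], '4'))		# (False, "releaseVersion 4 not in supportedReleaseVersions ['2a', '3']")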
# Check various intervals - if Configuration._configuration['cse.checkExpirationsInterval'] <= 0: + if _get('cse.checkExpirationsInterval') <= 0: return False, 'Configuration Error: [i]\[cse]:checkExpirationsInterval[/i] must be > 0' - if Configuration._configuration['console.refreshInterval'] <= 0.0: + if _get('console.refreshInterval') <= 0.0: return False, 'Configuration Error: [i]\[console]:refreshInterval[/i] must be > 0.0' - if Configuration._configuration['cse.maxExpirationDelta'] <= 0: + if _get('cse.maxExpirationDelta') <= 0: return False, 'Configuration Error: [i]\[cse]:maxExpirationDelta[/i] must be > 0' # Console settings from ..services.Console import TreeMode - if isinstance(tm := Configuration._configuration['console.treeMode'], str): + if isinstance(tm := _get('console.treeMode'), str): if not (treeMode := TreeMode.to(tm)): return False, f'Configuration Error: [i]\[console]:treeMode[/i] must be one of {TreeMode.names()}' - Configuration._configuration['console.treeMode'] = treeMode + _put('console.treeMode', treeMode) - Configuration._configuration['console.theme'] = (theme := Configuration._configuration['console.theme'].lower()) + _put('console.theme', (theme := _get('console.theme').lower())) if theme not in [ 'dark', 'light' ]: return False, f'Configuration Error: [i]\[console]:theme[/i] must be "light" or "dark"' - if Configuration._configuration['console.headless']: - Configuration._configuration['logging.enableScreenLogging'] = False - Configuration._configuration['textui.startWithTUI'] = False + if _get('console.headless'): + _put('logging.enableScreenLogging', False) + _put('textui.startWithTUI', False) # Script settings - if Configuration._configuration['scripting.fileMonitoringInterval'] < 0.0: + if _get('scripting.fileMonitoringInterval') < 0.0: return False, f'Configuration Error: [i]\[scripting]:fileMonitoringInterval[/i] must be >= 0.0' - if (scriptDirs := Configuration._configuration['scripting.scriptDirectories']): + if _get('scripting.maxRuntime') < 0.0: + return False, f'Configuration Error: [i]\[scripting]:maxRuntime[/i] must be >= 0.0' + if (scriptDirs := _get('scripting.scriptDirectories')): lst = [] for each in scriptDirs: if not each: @@ -657,22 +818,31 @@ def validate(initial:Optional[bool] = False) -> Tuple[bool, str]: if not os.path.isdir(each): return False, f'Configuration Error: [i]\[scripting]:scriptDirectory[/i]: directory "{each}" does not exist, is not a directory or is not accessible' lst.append(each) - Configuration._configuration['scripting.scriptDirectories'] = lst + _put('scripting.scriptDirectories', lst) # TimeSyncBeacon defaults - bcni = Configuration._configuration['resource.tsb.bcni'] + bcni = _get('resource.tsb.bcni') try: isodate.parse_duration(bcni) except Exception as e: return False, f'Configuration Error: [i]\[resource.tsb]:bcni[/i]: configuration value must be an ISO8601 duration' + # Check group resource defaults + if _get('resource.grp.resultExpirationTime') < 0: + return False, f'Configuration Error: [i]\[resource.grp]:resultExpirationTime[/i] must be >= 0' + # Everything is fine return True, None @staticmethod def print() -> str: + """ Prints the current configuration to the console. + + Returns: + A string with the current configuration. + """ result = 'Configuration:\n' # Magic string used e.g. 
in tests, don't remove for (k,v) in Configuration._configuration.items(): result += f' {k} = {v}\n' @@ -681,24 +851,49 @@ def print() -> str: @staticmethod def all() -> Dict[str, Any]: + """ Returns the complete configuration as a dictionary. + + Returns: + A dictionary with the complete configuration. + """ return Configuration._configuration @staticmethod def get(key: str) -> Any: """ Retrieve a configuration value or None if no configuration could be found for a key. + + Args: + key: The configuration key to retrieve. + + Returns: + The configuration value or None if no configuration could be found for a key. """ return Configuration._configuration.get(key) @staticmethod def addDoc(key: str, markdown:str) -> None: + """ Adds a documentation for a configuration key. + + Args: + key: The configuration key to add the documentation for. + markdown: The documentation in markdown format. + """ if key: Configuration._configurationDocs[key] = markdown @staticmethod - def getDoc(key:str) -> str|None: + def getDoc(key:str) -> Optional[str]: + """ Retrieves the documentation for a configuration key. + + Args: + key: The configuration key to retrieve the documentation for. + + Returns: + The documentation in markdown format or None if no documentation could be found for the key. + """ return Configuration._configurationDocs.get(key) @@ -706,6 +901,10 @@ def getDoc(key:str) -> str|None: def update(key:str, value:Any) -> Optional[str]: """ Update a configuration value and inform other components via an event. + Args: + key: The configuration key to update. + value: The new value for the configuration key. + Returns: None if no error occurs, or a string with an error message, what has gone wrong while validating """ @@ -728,6 +927,12 @@ def update(key:str, value:Any) -> Optional[str]: @staticmethod def has(key:str) -> bool: """ Check whether a configuration setting exsists. + + Args: + key: The configuration key to check. + + Returns: + True if the configuration key exists, False otherwise. 
""" return key in Configuration._configuration diff --git a/acme/services/Console.py b/acme/services/Console.py index 2fc0f897..b9526bbc 100644 --- a/acme/services/Console.py +++ b/acme/services/Console.py @@ -358,9 +358,9 @@ def help(self, key:str) -> None: ] table = Table(row_styles = [ '', L.tableRowStyle]) - table.add_column('Key', no_wrap = True, justify = 'left') - table.add_column('Description', no_wrap = True) - table.add_column('Script', no_wrap = True, justify = 'center') + table.add_column('Key', no_wrap = True, justify = 'left', min_width = 10) + table.add_column('Description', no_wrap = False) + table.add_column('Script', no_wrap = True, justify = 'center', min_width = 6) for each in commands: table.add_row(each[0], each[1], '', end_section = each == commands[-1]) @@ -825,7 +825,7 @@ def _plotGraph(self, resource:Resource) -> None: # plot try: - cins = CSE.dispatcher.directChildResources(resource.ri, ResourceTypes.CIN) + cins = CSE.dispatcher.retrieveDirectChildResources(resource.ri, ResourceTypes.CIN) x = range(1, (lcins := len(cins)) + 1) y = [ float(each.con) for each in cins ] cols, rows = plotext.terminal_size() @@ -1260,10 +1260,12 @@ def _stats() -> Table: resourceTypes += f'FCNT : {CSE.dispatcher.countResources(ResourceTypes.FCNT)}\n' resourceTypes += f'FCI : {CSE.dispatcher.countResources(ResourceTypes.FCI)}\n' resourceTypes += f'GRP : {CSE.dispatcher.countResources(ResourceTypes.GRP)}\n' + resourceTypes += f'LCP : {CSE.dispatcher.countResources(ResourceTypes.LCP)}\n' resourceTypes += f'MgmtObj : {CSE.dispatcher.countResources(ResourceTypes.MGMTOBJ)}\n' resourceTypes += f'NOD : {CSE.dispatcher.countResources(ResourceTypes.NOD)}\n' resourceTypes += f'PCH : {CSE.dispatcher.countResources(ResourceTypes.PCH)}\n' resourceTypes += f'REQ : {CSE.dispatcher.countResources(ResourceTypes.REQ)}\n' + resourceTypes += f'SCH : {CSE.dispatcher.countResources(ResourceTypes.SCH)}\n' resourceTypes += f'SMD : {CSE.dispatcher.countResources(ResourceTypes.SMD)}\n' resourceTypes += f'SUB : {CSE.dispatcher.countResources(ResourceTypes.SUB)}\n' resourceTypes += f'TS : {CSE.dispatcher.countResources(ResourceTypes.TS)}\n' @@ -1273,7 +1275,7 @@ def _stats() -> Table: resourceTypes += '\n' resourceTypes += _markup(f'[bold]Total[/bold] : {int(stats[Statistics.resourceCount]) - _virtualCount}\n') # substract the virtual resources # Correct height - resourceTypes += '\n' * (tableWorkers.row_count + 6) + resourceTypes += '\n' * (tableWorkers.row_count + 4) result = Table.grid(expand = True) @@ -1373,36 +1375,39 @@ def info(res:Resource) -> str: if self.treeMode not in [ TreeMode.COMPACT, TreeMode.CONTENTONLY ]: # if res.ty in [ T.FCNT, T.FCI] : # extraInfo = f' (cnd={res.cnd})' - if res.ty in [ ResourceTypes.CIN, ResourceTypes.TS ]: - extraInfo = f' ({res.cnf})' if res.cnf else '' - elif res.ty in [ ResourceTypes.CSEBase, ResourceTypes.CSEBaseAnnc, ResourceTypes.CSR ]: - extraInfo = f' (csi={res.csi})' - + match res.ty: + case ResourceTypes.FCNT | ResourceTypes.FCI: + extraInfo = f' ({res.cnf})' if res.cnf else '' + case ResourceTypes.CSEBase | ResourceTypes.CSEBaseAnnc | ResourceTypes.CSR: + extraInfo = f' (csi={res.csi})' + # Determine content contentInfo = '' if self.treeMode in [ TreeMode.CONTENT, TreeMode.CONTENTONLY ]: - if res.ty in [ ResourceTypes.CIN, ResourceTypes.TSI ]: - contentInfo = f'{res.con}' if res.con else '' - elif res.ty in [ ResourceTypes.FCNT, ResourceTypes.FCI ]: # All the custom attributes - contentInfo = ', '.join([ f'{attr}={str(res[attr])}' for attr in res.dict if 
CSE.validator.isExtraResourceAttribute(attr, res) ]) + match res.ty: + case ResourceTypes.CIN | ResourceTypes.TSI: + contentInfo = f'{res.con}' if res.con else '' + case ResourceTypes.FCNT | ResourceTypes.FCI: + contentInfo = ', '.join([ f'{attr}={str(res[attr])}' for attr in res.dict if CSE.validator.isExtraResourceAttribute(attr, res) ]) # construct the info info = '' - if self.treeMode == TreeMode.COMPACT: - info = f'-> {res.__rtype__}' - elif self.treeMode == TreeMode.CONTENT: - if len(contentInfo) > 0: - info = f'-> {res.__rtype__}{extraInfo} | {contentInfo}' - else: - info = f'-> {res.__rtype__}{extraInfo}' - elif self.treeMode == TreeMode.CONTENTONLY: - if len(contentInfo) > 0: - info = f'-> {contentInfo}' - else: # self.treeMode == NORMAL - if res.isVirtual(): - info = f'-> {res.__rtype__}{extraInfo} (virtual)' - else: - info = f'-> {res.__rtype__}{extraInfo} | ri={res.ri}' + match self.treeMode: + case TreeMode.COMPACT: + info = f'-> {res.__rtype__}' + case TreeMode.CONTENT: + if len(contentInfo) > 0: + info = f'-> {res.__rtype__}{extraInfo} | {contentInfo}' + else: + info = f'-> {res.__rtype__}{extraInfo}' + case TreeMode.CONTENTONLY: + if len(contentInfo) > 0: + info = f'-> {contentInfo}' + case _: # self.treeMode == NORMAL + if res.isVirtual(): + info = f'-> {res.__rtype__}{extraInfo} (virtual)' + else: + info = f'-> {res.__rtype__}{extraInfo} | ri={res.ri}' return f'{res.rn} [dim]{info}[/dim]' @@ -1417,7 +1422,7 @@ def getChildren(res:Resource, tree:Tree, level:int) -> None: """ if maxLevel > 0 and level == maxLevel: return - chs = CSE.dispatcher.directChildResources(res.ri) + chs = CSE.dispatcher.retrieveDirectChildResources(res.ri) for ch in chs: if ch.isVirtual() and not self.treeIncludeVirtualResources: # Ignore virual resources continue diff --git a/acme/services/Dispatcher.py b/acme/services/Dispatcher.py index 122c2778..477b6f34 100644 --- a/acme/services/Dispatcher.py +++ b/acme/services/Dispatcher.py @@ -6,6 +6,11 @@ # # Most internal requests are routed through here. # +""" Dispatcher module. Handles all requests and dispatches them to the + appropriate handlers. This includes requests for resources, requests + for resource creation, and requests for resource deletion. + Also handles the discovery of resources. +""" from __future__ import annotations from typing import List, Tuple, cast, Sequence, Optional @@ -23,10 +28,12 @@ from ..etc.ResponseStatusCodes import ORIGINATOR_HAS_NO_PRIVILEGE, NOT_FOUND, BAD_REQUEST from ..etc.ResponseStatusCodes import REQUEST_TIMEOUT, OPERATION_NOT_ALLOWED, TARGET_NOT_SUBSCRIBABLE, INVALID_CHILD_RESOURCE_TYPE from ..etc.ResponseStatusCodes import INTERNAL_SERVER_ERROR, SECURITY_ASSOCIATION_REQUIRED, CONFLICT +from ..etc.ResponseStatusCodes import TARGET_NOT_REACHABLE from ..etc.Utils import localResourceID, isSPRelative, isStructured, resourceModifiedAttributes, filterAttributes, riFromID from ..etc.Utils import srnFromHybrid, uniqueRI, noNamespace, riFromStructuredPath, csiFromSPRelative, toSPRelative, structuredPathFromRI from ..helpers.TextTools import findXPath from ..etc.DateUtils import waitFor, timeUntilTimestamp, timeUntilAbsRelTimestamp, getResourceDate +from ..etc.DateUtils import cronMatchesTimestamp from ..services import CSE from ..services.Configuration import Configuration from ..resources.Factory import resourceFromDict @@ -39,6 +46,10 @@ # TODO NOTIFY optimize local resource notifications # TODO handle config update class Dispatcher(object): + """ Dispatcher class. 
Handles all requests and dispatches them to the + appropriate handlers. This includes requests for resources, requests + for resource creation, and requests for resource deletion. + """ __slots__ = ( 'csiSlashLen', @@ -49,15 +60,24 @@ class Dispatcher(object): '_eventUpdateResource', '_eventDeleteResource', ) + """ Slots of class attributes. """ def __init__(self) -> None: + """ Initialize the Dispatcher. """ + self.csiSlashLen = len(CSE.cseCsiSlash) + """ Length of the CSI with a slash. """ self.sortDiscoveryResources = Configuration.get('cse.sortDiscoveredResources') + """ Sort the discovered resources. """ self._eventCreateResource = CSE.event.createResource # type: ignore [attr-defined] + """ Event handler for resource creation events. """ self._eventCreateChildResource = CSE.event.createChildResource # type: ignore [attr-defined] + """ Event handler for child resource creation events. """ self._eventUpdateResource = CSE.event.updateResource # type: ignore [attr-defined] + """ Event handler for resource update events. """ self._eventDeleteResource = CSE.event.deleteResource # type: ignore [attr-defined] + """ Event handler for resource deletion events. """ L.isInfo and L.log('Dispatcher initialized') @@ -93,8 +113,14 @@ def processRetrieveRequest(self, request:CSERequest, request: The incoming request. originator: The requests originator. id: Optional ID of the request. + Return: Result object. + + Raises: + BAD_REQUEST: If the request is invalid. + ORIGINATOR_HAS_NO_PRIVILEGE: If the originator has no privilege. + INTERNAL_SERVER_ERROR: If an internal error occurred. """ L.isDebug and L.logDebug(f'Process RETRIEVE request for id: {request.id}|{request.srn}') @@ -113,9 +139,11 @@ def processRetrieveRequest(self, request:CSERequest, raise BAD_REQUEST(L.logWarn(f'Only "m2m:atrl" is allowed in Content for RETRIEVE.')) CSE.validator.validateAttribute('atrl', attributeList) - # Handle operation execution time and check request expiration - self._handleOperationExecutionTime(request) - self._checkRequestExpiration(request) + # Handle operation execution time , and check CSE schedule and request expiration + self.handleOperationExecutionTime(request) + self._checkActiveCSESchedule() + self.checkRequestExpiration(request) + self.checkResultExpiration(request) # handle fanout point requests if (fanoutPointResource := self._getFanoutPointResource(srn)) and fanoutPointResource.ty == ResourceTypes.GRP_FOPT: @@ -129,7 +157,6 @@ def processRetrieveRequest(self, request:CSERequest, L.isDebug and L.logDebug(f'Redirecting request : {pollingChannelURIRsrc.getSrn()}') return pollingChannelURIRsrc.handleRetrieveRequest(request, id, originator) - # EXPERIMENTAL # Handle latest and oldest RETRIEVE if (laOlResource := self._latestOldestResource(srn)): # We need to check the srn here @@ -174,57 +201,62 @@ def processRetrieveRequest(self, request:CSERequest, return Result(rsc = ResponseStatusCode.OK, resource = self._resourcesToURIList(_resources, request.drt)) else: - if rcn in [ ResultContentType.attributes, - ResultContentType.attributesAndChildResources, - ResultContentType.childResources, - ResultContentType.attributesAndChildResourceReferences, - ResultContentType.originalResource ]: - resource = self.retrieveResource(id, originator, request) - - if not CSE.security.hasAccess(originator, resource, permission): - raise ORIGINATOR_HAS_NO_PRIVILEGE(L.logDebug(f'originator: {originator} has no {permission} privileges for resource: {resource.ri}')) - - # if rcn == "attributes" then we can return here, 
whatever the result is - if rcn == ResultContentType.attributes: - resource.willBeRetrieved(originator, request) # resource instance may be changed in this call - - # partial retrieve? - return self._partialFromResource(resource, attributeList) - - # if rcn == original-resource we retrieve the linked resource - if rcn == ResultContentType.originalResource: - # Some checks for resource validity - if not resource.isAnnounced(): - raise BAD_REQUEST(L.logDebug(f'Resource {resource.ri} is not an announced resource')) - if not (lnk := resource.lnk): # no link attribute? - raise INTERNAL_SERVER_ERROR('internal error: missing lnk attribute in target resource') - - # Retrieve and check the linked-to request - linkedResource = self.retrieveResource(lnk, originator, request) - - # Normally, we would do some checks here and call "willBeRetrieved", - # but we don't have to, because the resource is already checked during the - # retrieveResource call by the hosting CSE - # partial retrieve? - return self._partialFromResource(linkedResource, attributeList) - - # - # Semantic query request - # This is indicated by rcn = semantic content - # - if rcn == ResultContentType.semanticContent: - L.isDebug and L.logDebug('Performing semantic discovery / query') - # Validate SPARQL in semanticFilter - CSE.semantic.validateSPARQL(request.fc.smf) - - # Get all accessible semanticDescriptors - resources = self.discoverResources(id, originator, filterCriteria = FilterCriteria(ty = [ResourceTypes.SMD])) - - # Execute semantic query - res = CSE.semantic.executeSPARQLQuery(request.fc.smf, cast(Sequence[SMD], resources)) - L.isDebug and L.logDebug(f'SPARQL query result: {res.data}') - return Result(rsc = ResponseStatusCode.OK, data = { 'm2m:qres' : res.data }) + # We can handle some rcn here directly, but some will be handled after this + match rcn: + case ResultContentType.attributes |\ + ResultContentType.attributesAndChildResources |\ + ResultContentType.childResources |\ + ResultContentType.attributesAndChildResourceReferences|\ + ResultContentType.originalResource: + + resource = self.retrieveResource(id, originator, request) + + if not CSE.security.hasAccess(originator, resource, permission): + raise ORIGINATOR_HAS_NO_PRIVILEGE(L.logDebug(f'originator: {originator} has no {permission} privileges for resource: {resource.ri}')) + + match rcn: + case ResultContentType.attributes: + # if rcn == "attributes" then we can return here, whatever the result is + resource.willBeRetrieved(originator, request) # resource instance may be changed in this call + + # partial retrieve? + return self._partialFromResource(resource, attributeList) + + case ResultContentType.originalResource: + # if rcn == original-resource we retrieve the linked resource + + # Some checks for resource validity + if not resource.isAnnounced(): + raise BAD_REQUEST(L.logDebug(f'Resource {resource.ri} is not an announced resource')) + if not (lnk := resource.lnk): # no link attribute? + raise INTERNAL_SERVER_ERROR('internal error: missing lnk attribute in target resource') + + # Retrieve and check the linked-to request + linkedResource = self.retrieveResource(lnk, originator, request) + + # Normally, we would do some checks here and call "willBeRetrieved", + # but we don't have to, because the resource is already checked during the + # retrieveResource call by the hosting CSE + + # partial retrieve? 
+ return self._partialFromResource(linkedResource, attributeList) + + + case ResultContentType.semanticContent: + # Semantic query request + # This is indicated by rcn = semantic content + L.isDebug and L.logDebug('Performing semantic discovery / query') + # Validate SPARQL in semanticFilter + CSE.semantic.validateSPARQL(request.fc.smf) + + # Get all accessible semanticDescriptors + resources = self.discoverResources(id, originator, filterCriteria = FilterCriteria(ty = [ResourceTypes.SMD])) + + # Execute semantic query + res = CSE.semantic.executeSPARQLQuery(request.fc.smf, cast(Sequence[SMD], resources)) + L.isDebug and L.logDebug(f'SPARQL query result: {res.data}') + return Result(rsc = ResponseStatusCode.OK, data = { 'm2m:qres' : res.data }) # # Discovery request @@ -245,28 +277,29 @@ def processRetrieveRequest(self, request:CSERequest, # Handle more sophisticated RCN # - if rcn == ResultContentType.attributesAndChildResources: - self.resourceTreeDict(allowedResources, resource) # the function call add attributes to the target resource - return Result(rsc = ResponseStatusCode.OK, resource = resource) - - elif rcn == ResultContentType.attributesAndChildResourceReferences: - self._resourceTreeReferences(allowedResources, resource, request.drt, 'ch') # the function call add attributes to the target resource - return Result(rsc = ResponseStatusCode.OK, resource = resource) - - elif rcn == ResultContentType.childResourceReferences: - childResourcesRef = self._resourceTreeReferences(allowedResources, None, request.drt, 'm2m:rrl') - return Result(rsc = ResponseStatusCode.OK, resource = childResourcesRef) - - elif rcn == ResultContentType.childResources: - childResources:JSON = { resource.tpe : {} } # Root resource as a dict with no attribute - self.resourceTreeDict(allowedResources, childResources[resource.tpe]) # Adding just child resources - return Result(rsc = ResponseStatusCode.OK, resource = childResources) + match rcn: + case ResultContentType.attributesAndChildResources: + self.resourceTreeDict(allowedResources, resource) # the function call add attributes to the target resource + return Result(rsc = ResponseStatusCode.OK, resource = resource) + + case ResultContentType.attributesAndChildResourceReferences: + self._resourceTreeReferences(allowedResources, resource, request.drt, 'ch') # the function call add attributes to the target resource + return Result(rsc = ResponseStatusCode.OK, resource = resource) + + case ResultContentType.childResourceReferences: + childResourcesRef = self._resourceTreeReferences(allowedResources, None, request.drt, 'm2m:rrl') + return Result(rsc = ResponseStatusCode.OK, resource = childResourcesRef) - elif rcn == ResultContentType.discoveryResultReferences: # URIList - return Result(rsc = ResponseStatusCode.OK, resource = self._resourcesToURIList(allowedResources, request.drt)) + case ResultContentType.childResources: + childResources:JSON = { resource.tpe : {} } # Root resource as a dict with no attribute + self.resourceTreeDict(allowedResources, childResources[resource.tpe]) # Adding just child resources + return Result(rsc = ResponseStatusCode.OK, resource = childResources) - else: - raise BAD_REQUEST(f'unsuppored rcn: {rcn} for RETRIEVE') + case ResultContentType.discoveryResultReferences: + return Result(rsc = ResponseStatusCode.OK, resource = self._resourcesToURIList(allowedResources, request.drt)) + + case _: + raise BAD_REQUEST(f'unsuppored rcn: {rcn} for RETRIEVE') def retrieveResource(self, id:str, @@ -281,9 +314,9 @@ def retrieveResource(self, 
id:str, If no, then try to retrieve the resource from a connected (!) remote CSE. originator: The originator of the request. postRetrieveHook: Only when retrieving localls, invoke the Resource's *willBeRetrieved()* callback. + Return: Result instance. - """ if id: if id.startswith(CSE.cseCsiSlash) and len(id) > self.csiSlashLen: # TODO for all operations? @@ -310,6 +343,20 @@ def retrieveLocalResource(self, ri:Optional[str] = None, srn:Optional[str] = None, originator:Optional[str] = None, request:Optional[CSERequest] = None) -> Resource: + """ Retrieve a resource locally. + + Args: + ri: The resource ID. + srn: The structured resource name. + originator: The originator of the request. + request: The request. + + Return: + The retrieved resource. + + Raises: + NOT_FOUND: If the resource cannot be found. + """ L.isDebug and L.logDebug(f'Retrieve local resource: {ri}|{srn} for originator: {originator}') if ri: @@ -341,6 +388,18 @@ def discoverResources(self, filterCriteria:Optional[FilterCriteria] = None, rootResource:Optional[Resource] = None, permission:Optional[Permission] = Permission.DISCOVERY) -> List[Resource]: + """ Discover resources. This is the main function for resource discovery. + + Args: + id: The ID of the resource to start discovery from. + originator: The originator of the request. + filterCriteria: The filter criteria. + rootResource: The root resource for discovery. + permission: The permission to use. + + Return: + A list of discovered resources. + """ L.isDebug and L.logDebug('Discovering resources') if not rootResource: @@ -356,7 +415,7 @@ def discoverResources(self, lim:int = filterCriteria.lim if filterCriteria.lim is not None else sys.maxsize # get all direct children and slice the page (offset and limit) - dcrs = self.directChildResources(id)[ofst-1:ofst-1 + lim] # now dcrs only contains the desired child resources for ofst and lim + dcrs = self.retrieveDirectChildResources(id)[ofst-1:ofst-1 + lim] # now dcrs only contains the desired child resources for ofst and lim # a bit of optimization. This length stays the same. allLen = len(filterCriteria.attributes) if filterCriteria.attributes else 0 @@ -404,12 +463,27 @@ def _discoverResources(self, rootResource:Resource, dcrs:Optional[list[Resource]] = None, filterCriteria:Optional[FilterCriteria] = None, permission:Optional[Permission] = Permission.DISCOVERY) -> list[Resource]: + """ Discover resources recursively. This is a helper function for discoverResources(). + + Args: + rootResource: The root resource for discovery. + originator: The originator of the request. + level: The level of discovery. + fo: The filter operation. + allLen: The length of all filter criteria. + dcrs: The direct child resources of the root resource. + filterCriteria: The filter criteria. + permission: The permission to use. + + Return: + A list of discovered resources. + """ if not rootResource or level == 0: # no resource or level == 0 return [] # get all direct children, if not provided if not dcrs: - if len(dcrs := self.directChildResources(rootResource.ri)) == 0: + if len(dcrs := self.retrieveDirectChildResources(rootResource.ri)) == 0: return [] @@ -522,6 +596,11 @@ def _matchResource(self, r:Resource, fo:int, allLen:int, filterCriteria:FilterCr if filterCriteria.aq: found += 1 if CSE.script.runComparisonQuery(filterCriteria.aq, r) else 0 + # Geo query + if filterCriteria.geom: # Just check one of the tree required attributes. 
If one is there, all are there + allLen += 1 # Add one more criteria to check to the required count + if r.loc: # Only check if the resource has a location + found += 1 if CSE.location.checkGeoLocation(r, filterCriteria.gmty, filterCriteria._geom, filterCriteria.gsf) else 0 # L.isDebug and L.logDebug(f'fo: {fo}, found: {found}, allLen: {allLen}') # Test whether the OR or AND criteria is fullfilled @@ -547,8 +626,17 @@ def processCreateRequest(self, request:CSERequest, request: The incoming request. originator: The requests originator. id: Optional ID of the request. + Return: Result object. + + Raises: + BAD_REQUEST: If the request is invalid. + NOT_FOUND: If the resource cannot be found. + OPERATION_NOT_ALLOWED: If the operation is not allowed. + SECURITY_ASSOCIATION_REQUIRED: If a security association is required. + ORIGINATOR_HAS_NO_PRIVILEGE: If the originator has no privilege. + CONFLICT: If the resource already exists. """ L.isDebug and L.logDebug(f'Process CREATE request for id: {request.id}|{request.srn}') @@ -562,9 +650,11 @@ def processCreateRequest(self, request:CSERequest, # return Result.errorResult(rsc = RC.notFound, dbg = L.logDebug('resource not found')) raise NOT_FOUND(L.logDebug('resource not found')) - # Handle operation execution time and check request expiration - self._handleOperationExecutionTime(request) - self._checkRequestExpiration(request) + # Handle operation execution time, and check CSE schedule and request expiration + self.handleOperationExecutionTime(request) + self._checkActiveCSESchedule() + self.checkRequestExpiration(request) + self.checkResultExpiration(request) # handle fanout point requests if (fanoutPointRsrc := self._getFanoutPointResource(srn)) and fanoutPointRsrc.ty == ResourceTypes.GRP_FOPT: @@ -580,7 +670,7 @@ def processCreateRequest(self, request:CSERequest, # Get parent resource and check permissions L.isDebug and L.logDebug(f'Get parent resource and check permissions: {id}') - parentResource = CSE.dispatcher.retrieveResource(id) + parentResource = self.retrieveResource(id) if not CSE.security.hasAccess(originator, parentResource, Permission.CREATE, ty = ty, parentResource = parentResource): if ty == ResourceTypes.AE: @@ -614,7 +704,7 @@ def processCreateRequest(self, request:CSERequest, # Create the resource. If this fails we de-register everything try: - _resource = CSE.dispatcher.createLocalResource(newResource, parentResource, originator, request = request) + _resource = self.createLocalResource(newResource, parentResource, originator, request = request) except ResponseException as e: CSE.registration.checkResourceDeletion(newResource) # deregister resource. 
Ignore result, we take this from the creation raise e @@ -626,29 +716,32 @@ def processCreateRequest(self, request:CSERequest, # Handle RCN's # tpe = _resource.tpe - rcn = request.rcn - if rcn is None or rcn == ResultContentType.attributes: # Just the resource & attributes, integer - return Result(rsc = ResponseStatusCode.CREATED, resource = _resource) - - elif rcn == ResultContentType.modifiedAttributes: - dictOrg = request.pc[tpe] - dictNew = _resource.asDict()[tpe] - return Result(resource = { tpe : resourceModifiedAttributes(dictOrg, dictNew, request.pc[tpe]) }, - rsc = ResponseStatusCode.CREATED) - - elif rcn == ResultContentType.hierarchicalAddress: - return Result(resource = { 'm2m:uri' : _resource.structuredPath() }, - rsc = ResponseStatusCode.CREATED) - elif rcn == ResultContentType.hierarchicalAddressAttributes: - return Result(resource = { 'm2m:rce' : { noNamespace(tpe) : _resource.asDict()[tpe], 'uri' : _resource.structuredPath() }}, - rsc = ResponseStatusCode.CREATED) - - elif rcn == ResultContentType.nothing: - return Result(rsc = ResponseStatusCode.CREATED) + match request.rcn: + case None | ResultContentType.attributes: + # Just the resource & attributes, integer + return Result(rsc = ResponseStatusCode.CREATED, resource = _resource) + + case ResultContentType.modifiedAttributes: + dictOrg = request.pc[tpe] + dictNew = _resource.asDict()[tpe] + return Result(resource = { tpe : resourceModifiedAttributes(dictOrg, dictNew, request.pc[tpe]) }, + rsc = ResponseStatusCode.CREATED) + + case ResultContentType.hierarchicalAddress: + return Result(resource = { 'm2m:uri' : _resource.structuredPath() }, + rsc = ResponseStatusCode.CREATED) + + case ResultContentType.hierarchicalAddressAttributes: + return Result(resource = { 'm2m:rce' : { noNamespace(tpe) : _resource.asDict()[tpe], 'uri' : _resource.structuredPath() }}, + rsc = ResponseStatusCode.CREATED) + + case ResultContentType.nothing: + return Result(rsc = ResponseStatusCode.CREATED) + + case _: + raise BAD_REQUEST('wrong rcn for CREATE') - else: - raise BAD_REQUEST('wrong rcn for CREATE') # TODO C.rcnDiscoveryResultReferences @@ -656,7 +749,22 @@ def createResourceFromDict(self, dct:JSON, parentID:str, ty:ResourceTypes, originator:str) -> Tuple[str, str, str]: - # TODO doc + """ Create a resource from a JSON dictionary. + + Args: + dct: The dictionary. + parentID: The parent ID. + ty: The resource type. + originator: The originator. + + Return: + A tuple of (resource ID, CSE-ID, parent ID). + + Raises: + INTERNAL_SERVER_ERROR: If an unknown/unsupported RSC is returned. + ORIGINATOR_HAS_NO_PRIVILEGE: If the originator has no privilege. + + """ # Create locally if (pID := localResourceID(parentID)) is not None: L.isDebug and L.logDebug(f'Creating local resource with ID: {pID} originator: {originator}') @@ -686,7 +794,8 @@ def createResourceFromDict(self, dct:JSON, res = CSE.request.handleSendRequest(CSERequest(to = (pri := toSPRelative(parentID)), originator = originator, ty = ty, - pc = dct) + pc = dct, + op = Operation.CREATE) )[0].result # there should be at least one result # The request might have gone through normally and returned, but might still have failed on the remote CSE. 
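[Editor's note] The CREATE result handling above (like the RETRIEVE, UPDATE and DELETE paths elsewhere in this diff) now dispatches on the Result Content type with `match`/`case` instead of `if`/`elif` chains. The following is a minimal, standalone sketch of that dispatch style; the enum members, the `Result` dataclass and the status code value are illustrative stand-ins, not ACME's actual definitions.

```python
from dataclasses import dataclass
from enum import Enum, auto
from typing import Any, Optional


class ResultContentType(Enum):
	"""Illustrative subset of oneM2M Result Content values (not ACME's full enum)."""
	attributes = auto()
	hierarchicalAddress = auto()
	nothing = auto()


@dataclass
class Result:
	rsc: int                        # response status code, e.g. 2001 = CREATED (assumed value)
	resource: Optional[Any] = None  # resource representation, if any


def buildCreateResult(rcn: Optional[ResultContentType], resource: dict, structuredPath: str) -> Result:
	"""Map the requested Result Content to a response, mirroring the match/case style used above."""
	match rcn:
		case None | ResultContentType.attributes:
			return Result(rsc = 2001, resource = resource)
		case ResultContentType.hierarchicalAddress:
			return Result(rsc = 2001, resource = { 'm2m:uri': structuredPath })
		case ResultContentType.nothing:
			return Result(rsc = 2001)
		case _:
			raise ValueError('wrong rcn for CREATE')


if __name__ == '__main__':
	print(buildCreateResult(ResultContentType.hierarchicalAddress, { 'm2m:cnt': {} }, 'cse-in/myCnt'))
```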
@@ -694,7 +803,7 @@ def createResourceFromDict(self, dct:JSON, if res.rsc != ResponseStatusCode.CREATED: _exc = exceptionFromRSC(res.rsc) # Get exception class from rsc if _exc: - raise _exc(dbg = res.request.pc.get('dbg')) # type:ignore[call-arg] + raise _exc(res.request.pc.get('dbg')) # type:ignore[call-arg] raise INTERNAL_SERVER_ERROR(f'unknown/unsupported RSC: {res.rsc}') resRi = findXPath(res.request.pc, '{*}/ri') @@ -706,12 +815,27 @@ def createResourceFromDict(self, dct:JSON, def createLocalResource(self, resource:Resource, - parentResource:Resource = None, + parentResource:Resource, originator:Optional[str] = None, request:Optional[CSERequest] = None) -> Resource: + """ Create a resource locally. + + Args: + resource: The resource to create. + parentResource: The parent resource. + originator: The originator of the request. + request: The request. + + Return: + The created resource. + + Raises: + TARGET_NOT_SUBSCRIBABLE: If the parent resource is not subscribable. + INVALID_CHILD_RESOURCE_TYPE: If the child resource type is invalid. + """ L.isDebug and L.logDebug(f'CREATING resource ri: {resource.ri}, type: {resource.ty}') - if parentResource: + if parentResource: # parentResource might be None if this is the root resource L.isDebug and L.logDebug(f'Parent ri: {parentResource.ri}') if not parentResource.canHaveChild(resource): if resource.ty == ResourceTypes.SUB: @@ -777,8 +901,15 @@ def processUpdateRequest(self, request:CSERequest, request: The incoming request. originator: The requests originator. id: Optional ID of the request. + Return: Result object. + + Raises: + BAD_REQUEST: If the request is invalid. + NOT_FOUND: If the resource cannot be found. + OPERATION_NOT_ALLOWED: If the operation is not allowed. + ORIGINATOR_HAS_NO_PRIVILEGE: If the originator has no privilege. """ L.isDebug and L.logDebug(f'Process UPDATE request for id: {request.id}|{request.srn}') @@ -792,9 +923,11 @@ def processUpdateRequest(self, request:CSERequest, if not id and not fopsrn: raise NOT_FOUND(L.logDebug('resource not found')) - # Handle operation execution time and check request expiration - self._handleOperationExecutionTime(request) - self._checkRequestExpiration(request) + # Handle operation execution time , and check CSE schedule and request expiration + self.handleOperationExecutionTime(request) + self._checkActiveCSESchedule() + self.checkRequestExpiration(request) + self.checkResultExpiration(request) # handle fanout point requests if (fanoutPointResource := self._getFanoutPointResource(fopsrn)) and fanoutPointResource.ty == ResourceTypes.GRP_FOPT: @@ -834,25 +967,28 @@ def processUpdateRequest(self, request:CSERequest, # tpe = resource.tpe - if request.rcn is None or request.rcn == ResultContentType.attributes: # rcn is an int - return Result(rsc = ResponseStatusCode.UPDATED, resource = resource) + + match request.rcn: + case None | ResultContentType.attributes: + return Result(rsc = ResponseStatusCode.UPDATED, resource = resource) + + case ResultContentType.modifiedAttributes: + dictNew = deepcopy(resource.dict) + requestPC = request.pc[tpe] + # return only the modified attributes. This does only include those attributes that are updated differently, or are + # changed by the CSE, then from the original request. Luckily, all key/values that are touched in the update request + # are in the resource's __modified__ variable. 
+ return Result(rsc = ResponseStatusCode.UPDATED, + resource = { tpe : resourceModifiedAttributes(dictOrg, dictNew, requestPC, modifiers = resource[Constants.attrModified]) }) + + case ResultContentType.nothing: + return Result(rsc = ResponseStatusCode.UPDATED) - elif request.rcn == ResultContentType.modifiedAttributes: - dictNew = deepcopy(resource.dict) - requestPC = request.pc[tpe] - # return only the modified attributes. This does only include those attributes that are updated differently, or are - # changed by the CSE, then from the original request. Luckily, all key/values that are touched in the update request - # are in the resource's __modified__ variable. - return Result(rsc = ResponseStatusCode.UPDATED, - resource = { tpe : resourceModifiedAttributes(dictOrg, dictNew, requestPC, modifiers = resource[Constants.attrModified]) }) - elif request.rcn == ResultContentType.nothing: - return Result(rsc = ResponseStatusCode.UPDATED) + case _: + raise BAD_REQUEST('wrong rcn for UPDATE') # TODO C.rcnDiscoveryResultReferences - else: - raise BAD_REQUEST('wrong rcn for UPDATE') - def updateLocalResource(self, resource:Resource, dct:Optional[JSON] = None, @@ -865,6 +1001,7 @@ def updateLocalResource(self, resource:Resource, dct: JSON dictionary with the updated attributes. doUpdateCheck: Enable/disable a call to update(). originator: The request's originator. + Return: Updated resource. """ @@ -888,7 +1025,21 @@ def updateResourceFromDict(self, dct:JSON, id:str, originator:Optional[str] = None, resource:Optional[Resource] = None) -> Resource: - # TODO doc + """ Update a resource from a JSON dictionary. + + Args: + dct: The dictionary. + id: The resource ID. + originator: The originator. + resource: The resource to update. + + Return: + The updated resource. + + Raises: + INTERNAL_SERVER_ERROR: If the resource cannot be updated. + ORIGINATOR_HAS_NO_PRIVILEGE: If the originator has no UPDATE privileges. + """ # Update locally if (rID := localResourceID(id)) is not None: @@ -919,7 +1070,7 @@ def updateResourceFromDict(self, dct:JSON, if result.rsc != ResponseStatusCode.UPDATED: _exc = exceptionFromRSC(result.rsc) # Get exception class from rsc if _exc: - raise _exc(dbg = result.request.pc.get('dbg')) # type:ignore[call-arg] + raise _exc(result.request.pc.get('dbg')) # type:ignore[call-arg] raise INTERNAL_SERVER_ERROR(f'unknown/unsupported RSC: {result.rsc}') updatedResource = result.resource @@ -942,8 +1093,13 @@ def processDeleteRequest(self, request:CSERequest, request: The incoming request. originator: The requests originator. id: Optional ID of the request. + Return: Result object. + + Raises: + NOT_FOUND: If the resource cannot be found. + ORIGINATOR_HAS_NO_PRIVILEGE: If the originator has no privilege. 
""" L.isDebug and L.logDebug(f'Process DELETE request for id: {request.id}|{request.srn}') @@ -957,9 +1113,11 @@ def processDeleteRequest(self, request:CSERequest, if not id and not fopsrn: raise NOT_FOUND(L.logDebug('resource not found')) - # Handle operation execution time and check request expiration - self._handleOperationExecutionTime(request) - self._checkRequestExpiration(request) + # Handle operation execution time , and check CSE schedule and request expiration + self.handleOperationExecutionTime(request) + self._checkActiveCSESchedule() + self.checkRequestExpiration(request) + self.checkResultExpiration(request) # handle fanout point requests if (fanoutPointRsrc := self._getFanoutPointResource(fopsrn)) and fanoutPointRsrc.ty == ResourceTypes.GRP_FOPT: @@ -981,38 +1139,42 @@ def processDeleteRequest(self, request:CSERequest, # resultContent:Resource|JSON = None - if request.rcn is None or request.rcn == ResultContentType.nothing: # rcn is an int - resultContent = None - - elif request.rcn == ResultContentType.attributes: - resultContent = resource - - # resource and child resources, full attributes - elif request.rcn == ResultContentType.attributesAndChildResources: - children = self.discoverChildren(id, resource, originator, request.fc, Permission.DELETE) - self._childResourceTree(children, resource) # the function call add attributes to the result resource. Don't use the return value directly - resultContent = resource - - # direct child resources, NOT the root resource - elif request.rcn == ResultContentType.childResources: - children = self.discoverChildren(id, resource, originator, request.fc, Permission.DELETE) - childResources:JSON = { resource.tpe : {} } # Root resource as a dict with no attributes - self.resourceTreeDict(children, childResources[resource.tpe]) - resultContent = childResources - - elif request.rcn == ResultContentType.attributesAndChildResourceReferences: - children = self.discoverChildren(id, resource, originator, request.fc, Permission.DELETE) - self._resourceTreeReferences(children, resource, request.drt, 'ch') # the function call add attributes to the result resource - resultContent = resource - - elif request.rcn == ResultContentType.childResourceReferences: # child resource references - children = self.discoverChildren(id, resource, originator, request.fc, Permission.DELETE) - childResourcesRef = self._resourceTreeReferences(children, None, request.drt, 'm2m:rrl') - resultContent = childResourcesRef + match request.rcn: + case None | ResultContentType.nothing: + resultContent = None + + case ResultContentType.attributes: + resultContent = resource + + case ResultContentType.attributesAndChildResources: + # resource and child resources, full attributes + children = self.discoverChildren(id, resource, originator, request.fc, Permission.DELETE) + self._childResourceTree(children, resource) # the function call add attributes to the result resource. 
Don't use the return value directly + resultContent = resource + + case ResultContentType.childResources: + # direct child resources, NOT the root resource + children = self.discoverChildren(id, resource, originator, request.fc, Permission.DELETE) + childResources:JSON = { resource.tpe : {} } # Root resource as a dict with no attributes + self.resourceTreeDict(children, childResources[resource.tpe]) + resultContent = childResources + + case ResultContentType.attributesAndChildResourceReferences: + # resource and child resource references + children = self.discoverChildren(id, resource, originator, request.fc, Permission.DELETE) + self._resourceTreeReferences(children, resource, request.drt, 'ch') # the function call add attributes to the result resource + resultContent = resource + + case ResultContentType.childResourceReferences: + # direct child resource references, NOT the root resource + children = self.discoverChildren(id, resource, originator, request.fc, Permission.DELETE) + childResourcesRef = self._resourceTreeReferences(children, None, request.drt, 'm2m:rrl') + resultContent = childResourcesRef + + case _: + raise BAD_REQUEST('wrong rcn for DELETE') # TODO RCN.discoveryResultReferences - else: - raise BAD_REQUEST('wrong rcn for DELETE') # remove resource self.deleteLocalResource(resource, originator, withDeregistration = True) @@ -1028,6 +1190,15 @@ def deleteLocalResource(self, resource:Resource, withDeregistration:Optional[bool] = False, parentResource:Optional[Resource] = None, doDeleteCheck:Optional[bool] = True) -> None: + """ Delete a resource from the CSE. Call deactivate() and deleted() callbacks on the resource. + + Args: + resource: The resource to delete. + originator: The originator of the request. + withDeregistration: If True, deregister the resource. + parentResource: The parent resource. + doDeleteCheck: If True, call childRemoved() on the parent resource. + """ L.isDebug and L.logDebug(f'Removing resource ri: {resource.ri}, type: {resource.ty}') resource.deactivate(originator) # deactivate it first @@ -1057,8 +1228,17 @@ def deleteLocalResource(self, resource:Resource, def deleteResource(self, id:str, originator:Optional[str] = None) -> None: - # TODO doc + """ Delete a resource from the CSE. + Args: + id: The resource ID to delete. + originator: The originator of the request. Defaults to None. + + Raises: + OPERATION_NOT_ALLOWED: If the resource is a CSEBase resource. + NOT_FOUND: If the resource is not found. + ORIGINATOR_HAS_NO_PRIVILEGE: If the originator has no DELETE access to the resource. 
+ """ # Update locally if (rID := localResourceID(id)) is not None: @@ -1068,7 +1248,7 @@ def deleteResource(self, id:str, originator:Optional[str] = None) -> None: resource = self.retrieveLocalResource(rID, originator = originator) if id in [ CSE.cseRi, CSE.cseRi, CSE.cseRn ]: - raise OPERATION_NOT_ALLOWED(dbg = 'DELETE operation is not allowed for CSEBase') + raise OPERATION_NOT_ALLOWED('DELETE operation is not allowed for CSEBase') # Check Permission if not CSE.security.hasAccess(originator, resource, Permission.DELETE): @@ -1089,7 +1269,7 @@ def deleteResource(self, id:str, originator:Optional[str] = None) -> None: if res.rsc != ResponseStatusCode.DELETED: _exc = exceptionFromRSC(res.rsc) # Get exception class from rsc if _exc: - raise _exc(dbg = res.request.pc.get('dbg')) # type:ignore[call-arg] + raise _exc(res.request.pc.get('dbg')) # type:ignore[call-arg] raise INTERNAL_SERVER_ERROR(f'unknown/unsupported RSC: {res.rsc}') @@ -1107,8 +1287,13 @@ def processNotifyRequest(self, request:CSERequest, request: The incoming request. originator: The requests originator. id: Optional ID of the request. + Return: Result object. + + Raises: + BAD_REQUEST: If the request is invalid. + ORIGINATOR_HAS_NO_PRIVILEGE: If the originator has no privilege. """ L.isDebug and L.logDebug(f'Process NOTIFY request for id: {request.id}|{request.srn}') @@ -1118,9 +1303,11 @@ def processNotifyRequest(self, request:CSERequest, srn, id = self._checkHybridID(request, id) # overwrite id if another is given - # Handle operation execution time and check request expiration - self._handleOperationExecutionTime(request) - self._checkRequestExpiration(request) + # Handle operation execution time, and check CSE schedule and request expiration + self.handleOperationExecutionTime(request) + self._checkActiveCSESchedule() + self.checkRequestExpiration(request) + self.checkResultExpiration(request) # get resource to be notified and check permissions targetResource = self.retrieveResource(id) @@ -1159,7 +1346,19 @@ def processNotifyRequest(self, request:CSERequest, def notifyLocalResource(self, ri:str, originator:str, content:JSON) -> Result: - # TODO doc + """ Notify a local resource. + + Args: + ri: The resourceIdentifier of the resource to notify. + originator: The originator of the request. + content: The notification content. + + Return: + Result object. + + Raises: + ORIGINATOR_HAS_NO_PRIVILEGE: If the originator has no NOTIFY access to the resource. + """ L.isDebug and L.logDebug(f'Sending NOTIFY to local resource: {ri}') resource = self.retrieveLocalResource(ri, originator = originator) @@ -1181,7 +1380,7 @@ def notifyLocalResource(self, ri:str, originator) return Result(rsc = ResponseStatusCode.OK) except ResponseException as e: - L.isWarn and L.logWarn(f'error handling notificatuin: {e.dbg}') + L.isWarn and L.logWarn(f'error handling notification: {e.dbg}') raise @@ -1191,24 +1390,45 @@ def notifyLocalResource(self, ri:str, # Public Utility methods # - def directChildResources(self, pi:str, - ty:Optional[ResourceTypes] = None) -> list[Resource]: + def retrieveDirectChildResources(self, pi:str, + ty:Optional[ResourceTypes|list[ResourceTypes]] = None) -> list[Resource]: """ Return all child resources of a resource, optionally filtered by type. An empty list is returned if no child resource could be found. + + Args: + pi: The parent's resourceIdentifier. + ty: The resource type or list of resource types to filter for. + + Return: + A list of retrieved `Resource` objects. This list might be empty. 
""" return cast(List[Resource], CSE.storage.directChildResources(pi, ty)) def directChildResourcesRI(self, pi:str, - ty:Optional[ResourceTypes] = None) -> list[str]: + ty:Optional[ResourceTypes|list[ResourceTypes]] = None) -> list[str]: """ Return the resourceIdentifiers of all child resources of a resource, optionally filtered by type. An empty list is returned if no child resource could be found. + + Args: + pi: The parent's resourceIdentifier. + ty: The resource type or list of resource types to filter for. + + Return: + A list of retrieved resourceIdentifiers. This list might be empty. """ return CSE.storage.directChildResourcesRI(pi, ty) def countDirectChildResources(self, pi:str, ty:Optional[ResourceTypes] = None) -> int: """ Return the number of all child resources of resource, optionally filtered by type. + + Args: + pi: The parent's resourceIdentifier. + ty: The resource type to filter for. + + Return: + Number of child resources. """ return CSE.storage.countDirectChildResources(pi, ty) @@ -1216,6 +1436,13 @@ def countDirectChildResources(self, pi:str, ty:Optional[ResourceTypes] = None) - def hasDirectChildResource(self, pi:str, ri:str) -> bool: """ Check if a resource has a direct child resource with a given resourceID + + Args: + pi: The parent's resourceIdentifier. + ri: The resourceIdentifier to check for. + + Return: + True if a direct child resource with the given resourceIdentifier exists, False otherwise. """ return riFromID(ri) in self.directChildResourcesRI(pi) @@ -1264,7 +1491,18 @@ def discoverChildren(self, id:str, originator:str, filterCriteria:FilterCriteria, permission:Permission) -> Optional[list[Resource]]: - # TODO documentation + """ Discover child resources of a resource. + + Args: + id: The resourceIdentifier of the resource to discover the children for. + resource: The resource to discover the children for. + originator: The originator of the request. + filterCriteria: The filter criteria to use. + permission: The permission to check. + + Return: + A list of child resources. This list might be empty. + """ resources = self.discoverResources(id, originator, filterCriteria = filterCriteria, rootResource = resource, permission = permission) # check and filter by ACP @@ -1326,7 +1564,7 @@ def retrieveResourceWithPermission(self, ri:str, originator:str, permission:Perm `ORIGINATOR_HAS_NO_PRIVILEGE`: In case the originator has not the required permission to the resoruce. """ - resource = CSE.dispatcher.retrieveResource(riFromID(ri), originator) + resource = self.retrieveResource(riFromID(ri), originator) if not CSE.security.hasAccess(originator, resource, permission): raise ORIGINATOR_HAS_NO_PRIVILEGE(L.logDebug(f'originator has no access to the resource: {ri}')) return resource @@ -1341,7 +1579,7 @@ def deleteChildResources(self, parentResource:Resource, If *ty* is set only the resources of this type are removed. """ # Remove directChildResources - rs = self.directChildResources(parentResource.ri) + rs = self.retrieveDirectChildResources(parentResource.ri) for r in rs: if ty is None or r.ty == ty: # ty is an int #parentResource.childRemoved(r, originator) # recursion here @@ -1352,7 +1590,7 @@ def deleteChildResources(self, parentResource:Resource, # Request execution utilities # - def _handleOperationExecutionTime(self, request:CSERequest) -> None: + def handleOperationExecutionTime(self, request:CSERequest) -> None: """ Handle operation execution time and request expiration. If the OET is set then wait until the provided timestamp is reached. 
@@ -1367,7 +1605,7 @@ def _handleOperationExecutionTime(self, request:CSERequest) -> None: waitFor(delay) - def _checkRequestExpiration(self, request:CSERequest) -> None: + def checkRequestExpiration(self, request:CSERequest) -> None: """ Check request expiration timeout if a request timeout is give. Args: @@ -1377,7 +1615,39 @@ def _checkRequestExpiration(self, request:CSERequest) -> None: `REQUEST_TIMEOUT`: In case the request is expired """ if request._rqetUTCts is not None and timeUntilTimestamp(request._rqetUTCts) <= 0.0: - raise REQUEST_TIMEOUT(L.logDebug('request timed out')) + raise REQUEST_TIMEOUT(L.logDebug('request timed out reached')) + + + def checkResultExpiration(self, request:CSERequest) -> None: + """ Check result expiration timeout if a result timeout is given. + + Args: + request: The request to check. + + Raises: + `REQUEST_TIMEOUT`: In case the result is expired + `BAD_REQUEST`: In case the request expiration timestamp is greater than the result expiration timestamp. + """ + if not request.rset: + return + if timeUntilTimestamp(request._rsetUTCts) <= 0.0: + raise REQUEST_TIMEOUT(L.logDebug('result timed out reached')) + if request.rqet is not None and request._rsetUTCts < request._rqetUTCts: + raise BAD_REQUEST(L.logDebug('result expiration timestamp must be greater than request expiration timestamp'), data = request) + + + def _checkActiveCSESchedule(self) -> None: + """ Check if the CSE is currently active according to its schedule. + + Raises: + `TARGET_NOT_REACHABLE`: In case the CSE is not active. + """ + if CSE.cseActiveSchedule: + for s in CSE.cseActiveSchedule: + if cronMatchesTimestamp(s): + return + # TODO not sure if this is the right error code + raise TARGET_NOT_REACHABLE(L.logDebug('request exection time outside of CSE\'s allowed schedule')) @@ -1442,7 +1712,17 @@ def _resourceTreeReferences(self, resources:list[Resource], drt:Optional[DesiredIdentifierResultType] = DesiredIdentifierResultType.structured, tp:Optional[str] = 'm2m:rrl') -> Resource|JSON: """ Retrieve child resource references of a resource and add them to - a new target resource as "children" """ + a **new** target resource instance as "children" + + Args: + resources: A list of resources to retrieve the child resource references from. + targetResource: The target resource to add the child resource references to. + drt: Either structured or unstructured. Defaults to structured. + tp: The type of the target resource. Defaults to 'm2m:rrl'. + + Return: + The target resource with the added child resource references. + """ if not targetResource: targetResource = { } @@ -1472,8 +1752,14 @@ def _resourceTreeReferences(self, resources:list[Resource], return targetResource - # Retrieve full child resources of a resource and add them to a new target resource def _childResourceTree(self, resources:list[Resource], targetResource:Resource|JSON) -> None: + """ Retrieve child resources of a resource and add them to + a **new** target resource instance as "children" + + Args: + resources: A list of resources to retrieve the child resources from. + targetResource: The target resource to add the child resources to. 
+ """ if len(resources) == 0: return result:JSON = {} @@ -1520,7 +1806,7 @@ def _getPollingChannelURIResource(self, id:str) -> Optional[PCH_PCU]: if not (id := structuredPathFromRI(id)): return None - resource = CSE.dispatcher.retrieveResource(id) + resource = self.retrieveResource(id) if resource.ty == ResourceTypes.PCH_PCU: return cast(PCH_PCU, resource) @@ -1552,7 +1838,7 @@ def _getFanoutPointResource(self, id:str) -> Optional[Resource]: if nid: try: - return CSE.dispatcher.retrieveResource(nid) + return self.retrieveResource(nid) except: pass return None @@ -1573,13 +1859,25 @@ def _latestOldestResource(self, id:str) -> Optional[Resource]: if not isStructured(id): if not (id := structuredPathFromRI(id)): return None - if (resource := CSE.dispatcher.retrieveResource(id)) and ResourceTypes.isLatestOldestResource(resource.ty): + if (resource := self.retrieveResource(id)) and ResourceTypes.isLatestOldestResource(resource.ty): return resource # Fallthrough return None def _partialFromResource(self, resource:Resource, attributeList:JSON) -> Result: + """ Filter attributes from a resource. + + Args: + resource: The resource to filter the attributes from. + attributeList: The list of attributes to filter. + + Return: + A Result object with the filtered resource. + + Raises: + BAD_REQUEST: In case an attribute is not defined for the resource. + """ if attributeList: # Validate that the attribute(s) are actual resouce attributes for a in attributeList: diff --git a/acme/services/GroupManager.py b/acme/services/GroupManager.py index 63a27e34..ce3c7569 100644 --- a/acme/services/GroupManager.py +++ b/acme/services/GroupManager.py @@ -10,21 +10,22 @@ """ This module implements the group service manager functionality. """ from __future__ import annotations -from typing import cast, List +from typing import cast, List, Optional, Any from ..etc.Types import ResourceTypes, Result, ConsistencyStrategy, Permission, Operation -from ..etc.Types import CSERequest, JSON +from ..etc.Types import CSERequest, JSON, ResponseType from ..etc.ResponseStatusCodes import MAX_NUMBER_OF_MEMBER_EXCEEDED, INVALID_ARGUMENTS, NOT_FOUND, RECEIVER_HAS_NO_PRIVILEGES -from ..etc.ResponseStatusCodes import ResponseStatusCode, GROUP_MEMBER_TYPE_INCONSISTENT, ORIGINATOR_HAS_NO_PRIVILEGE +from ..etc.ResponseStatusCodes import ResponseStatusCode, GROUP_MEMBER_TYPE_INCONSISTENT, ORIGINATOR_HAS_NO_PRIVILEGE, REQUEST_TIMEOUT from ..etc.Utils import isSPRelative, csiFromSPRelative, structuredPathFromRI +from ..etc.DateUtils import utcTime from ..resources.FCNT import FCNT from ..resources.MgmtObj import MgmtObj from ..resources.Resource import Resource from ..resources.GRP_FOPT import GRP_FOPT -from ..resources.GRP import GRP from ..resources.Factory import resourceFromDict from ..services import CSE from ..services.Logging import Logging as L +from ..services.Configuration import Configuration class GroupManager(object): @@ -36,19 +37,52 @@ def __init__(self) -> None: """ # Add delete event handler because we like to monitor the resources in mid CSE.event.addHandler(CSE.event.deleteResource, self.handleDeleteEvent) # type: ignore + + # Add handler for configuration updates + CSE.event.addHandler(CSE.event.configUpdate, self.configUpdate) # type: ignore + + # Add a handler when the CSE is reset + CSE.event.addHandler(CSE.event.cseReset, self.restart) # type: ignore + + # Assign configuration values + self._assignConfig() + L.isInfo and L.log('GroupManager initialized') def shutdown(self) -> bool: - """ Shutdown the Group Manager. 
+ """ Shutdown the GroupManager. Returns: - *True* when shutdown complete. + *True* when shutdown is complete. """ L.isInfo and L.log('GroupManager shut down') return True + def _assignConfig(self) -> None: + """ Assign the configuration values. + """ + self.resultExpirationTime = Configuration.get('resource.grp.resultExpirationTime') + + + def configUpdate(self, name:str, + key:Optional[str] = None, + value:Any = None) -> None: + """ Handle configuration updates. + """ + if key not in ( 'resource.grp.resultExpirationTime' ): + return + self._assignConfig() + + + def restart(self, name:str) -> None: + """ Restart the registration services. + """ + self._assignConfig() + L.isDebug and L.logDebug('GroupManager restarted') + + ######################################################################### def validateGroup(self, group:Resource, originator:str) -> None: @@ -149,24 +183,25 @@ def _checkMembersAndPrivileges(self, group:Resource, originator:str) -> None: # check specializationType spty if (spty := group.spty): - if isinstance(spty, int): # mgmtobj type - if isinstance(resource, MgmtObj) and ty != spty: - raise GROUP_MEMBER_TYPE_INCONSISTENT(f'resource and group member types mismatch: {ty} != {spty} for: {mid}') - elif isinstance(spty, str): # fcnt specialization - if isinstance(resource, FCNT) and resource.cnd != spty: - raise GROUP_MEMBER_TYPE_INCONSISTENT(f'resource and group member specialization types mismatch: {resource.cnd} != {spty} for: {mid}') + match spty: + case int(): # mgmtobj type + if isinstance(resource, MgmtObj) and ty != spty: + raise GROUP_MEMBER_TYPE_INCONSISTENT(f'resource and group member types mismatch: {ty} != {spty} for: {mid}') + case str(): # fcnt specialization + if isinstance(resource, FCNT) and resource.cnd != spty: + raise GROUP_MEMBER_TYPE_INCONSISTENT(f'resource and group member specialization types mismatch: {resource.cnd} != {spty} for: {mid}') # check type of resource and member type of group mt = group.mt if not (mt == ResourceTypes.MIXED or ty == mt): # types don't match - csy = group.csy - if csy == ConsistencyStrategy.abandonMember: # abandon member - continue - elif csy == ConsistencyStrategy.setMixed: # change group's member type - mt = ResourceTypes.MIXED - group['mt'] = ResourceTypes.MIXED - else: # abandon group - raise GROUP_MEMBER_TYPE_INCONSISTENT('group consistency strategy and type "mixed" mismatch') + match group.csy: + case ConsistencyStrategy.abandonMember: # abandon member + continue + case ConsistencyStrategy.setMixed: # change group's member type + mt = ResourceTypes.MIXED + group['mt'] = ResourceTypes.MIXED + case _: + raise GROUP_MEMBER_TYPE_INCONSISTENT('group consistency strategy and type "mixed" mismatch') # member seems to be ok, so add ri to the list if isLocalResource: @@ -199,6 +234,8 @@ def foptRequest(self, operation:Operation, `Result` instance. """ + L.isDebug and L.logDebug(f'Performing fanOutPoint operation: {operation} on: {id}') + # get parent / group and check permissions if not (groupResource := fopt.retrieveParentResource()): raise NOT_FOUND('group resource not found') @@ -209,10 +246,9 @@ def foptRequest(self, operation:Operation, #check access rights for the originator through memberAccessControlPolicies if not CSE.security.hasAccess(originator, groupResource, requestedPermission = permission, ty = request.ty): raise ORIGINATOR_HAS_NO_PRIVILEGE('insufficient privileges for originator') + # check whether there is something after the /fopt ... 
- - # _, _, tail = id.partition('/fopt/') if '/fopt/' in id else (None, None, '') _, _, tail = id.partition('/fopt/') L.isDebug and L.logDebug(f'Adding additional path elements: {tail}') @@ -221,14 +257,37 @@ def foptRequest(self, operation:Operation, resultList:List[Result] = [] tail = '/' + tail if len(tail) > 0 else '' # add remaining path, if any - for mid in groupResource.mid.copy(): # copy mi because it is changed in the loop + _mid = groupResource.mid.copy() # copy mi because it is changed in the loop + + # Determine the timeout for aggregating requests. + # If Result Expiration Timestamp is present in the request then use that one. + # Else use the default configuration, if set to a value > 0 + if request.rset is not None: + _timeoutTS = request._rsetUTCts + elif self.resultExpirationTime > 0: + _timeoutTS = utcTime() + self.resultExpirationTime + else: + _timeoutTS = 0 + + for mid in _mid: # Try to get the SRN and add the tail if srn := structuredPathFromRI(mid): mid = srn + tail else: mid = mid + tail # Invoke the request - resultList.append(CSE.request.processRequest(request, originator, mid)) + _result = CSE.request.processRequest(request, originator, mid) + # Check for RSET expiration + if _timeoutTS and _timeoutTS < utcTime(): + # Check for blocking request. Then raise a timeout + if request.rt == ResponseType.blockingRequest: + raise REQUEST_TIMEOUT(L.logDebug('Aggregation timed out')) + # Otherwise just interrupt the aggregation + break + # Append the result + resultList.append(_result) + # import time + # time.sleep(1.0) # construct aggregated response if len(resultList) > 0: @@ -244,6 +303,12 @@ def foptRequest(self, operation:Operation, items.append(item) rsp = { 'm2m:rsp' : items} agr = { 'm2m:agr' : rsp } + + # if the request is a flexBlocking request and the number of results is not equal to the number of members + # then the request must be marked as incomplete. This will be removed later when adding to the resource. + if len(_mid) != len(resultList) and request.rt == ResponseType.flexBlocking: + agr['acme:incomplete'] = True # type: ignore + else: agr = {} diff --git a/acme/services/HttpServer.py b/acme/services/HttpServer.py index e2e71f27..e400edf3 100644 --- a/acme/services/HttpServer.py +++ b/acme/services/HttpServer.py @@ -8,22 +8,24 @@ # from __future__ import annotations -from typing import Any, Callable, cast, Tuple, Optional +from typing import Any, Callable, cast, Optional import logging, sys, urllib3, re from copy import deepcopy import flask from flask import Flask, Request, request + from werkzeug.wrappers import Response from werkzeug.serving import WSGIRequestHandler from werkzeug.datastructures import MultiDict +from waitress import serve from flask_cors import CORS import requests import isodate from ..etc.Constants import Constants -from ..etc.Types import ReqResp, RequestType, ResourceTypes, Result, ResponseStatusCode, JSON +from ..etc.Types import ReqResp, RequestType, Result, ResponseStatusCode, JSON from ..etc.Types import Operation, CSERequest, ContentSerializationType, DesiredIdentifierResultType, ResponseType, ResultContentType from ..etc.ResponseStatusCodes import INTERNAL_SERVER_ERROR, BAD_REQUEST, REQUEST_TIMEOUT, TARGET_NOT_REACHABLE, ResponseException from ..etc.Utils import exceptionToResult, renameThread, uniqueRI, toSPRelative, removeNoneValuesFromDict,isURL @@ -48,6 +50,12 @@ """ Type definition for flask handler. 
""" + +######################################################################### +# +# HTTP Server +# + class HttpServer(object): __close__ = ( @@ -63,11 +71,15 @@ class HttpServer(object): 'isStopped', 'corsEnable', 'corsResources', + 'enableBasicAuth', + 'enableTokenAuth', + 'wsgiEnable', + 'wsgiThreadPoolSize', + 'wsgiConnectionLimit', 'backgroundActor', 'serverID', '_responseHeaders', 'webui', - 'mappeings', 'httpActor', '_eventHttpRetrieve', @@ -83,18 +95,14 @@ def __init__(self) -> None: # Initialize the http server # Meaning defaults are automatically provided. self.flaskApp = Flask(CSE.cseCsi) - self.rootPath = Configuration.get('http.root') - self.serverAddress = Configuration.get('http.address') - self.listenIF = Configuration.get('http.listenIF') - self.port = Configuration.get('http.port') - self.allowPatchForDelete= Configuration.get('http.allowPatchForDelete') - self.requestTimeout = Configuration.get('http.timeout') - self.webuiRoot = Configuration.get('webui.root') - self.webuiDirectory = f'{Configuration.get("packageDirectory")}/webui' - self.isStopped = False - self.corsEnable = Configuration.get('http.cors.enable') - self.corsResources = Configuration.get('http.cors.resources') + # Get the configuration settings + self._assignConfig() + + # Add handler for configuration updates + CSE.event.addHandler(CSE.event.configUpdate, self.configUpdate) # type: ignore + + self.isStopped = False self.backgroundActor:BackgroundWorker = None self.serverID = f'ACME {Constants.version}' # The server's ID for http response headers @@ -160,6 +168,56 @@ def __init__(self) -> None: self._eventResponseReceived = CSE.event.responseReceived # type: ignore [attr-defined] + def _assignConfig(self) -> None: + """ Assign the configuration values to the http server. + """ + self.rootPath = Configuration.get('http.root') + self.serverAddress = Configuration.get('http.address') + self.listenIF = Configuration.get('http.listenIF') + self.port = Configuration.get('http.port') + self.allowPatchForDelete= Configuration.get('http.allowPatchForDelete') + self.requestTimeout = Configuration.get('http.timeout') + self.webuiRoot = Configuration.get('webui.root') + self.webuiDirectory = f'{Configuration.get("packageDirectory")}/webui' + self.corsEnable = Configuration.get('http.cors.enable') + self.corsResources = Configuration.get('http.cors.resources') + self.enableBasicAuth = Configuration.get('http.security.enableBasicAuth') + self.enableTokenAuth = Configuration.get('http.security.enableTokenAuth') + self.wsgiEnable = Configuration.get('http.wsgi.enable') + self.wsgiThreadPoolSize = Configuration.get('http.wsgi.threadPoolSize') + self.wsgiConnectionLimit= Configuration.get('http.wsgi.connectionLimit') + + + def configUpdate(self, name:str, + key:Optional[str] = None, + value:Any = None) -> None: + """ Handle configuration updates. + + Args: + name: The name of the configuration section. + key: The key of the configuration value. + value: The new value. + """ + if key not in ( 'http.root', + 'http.address', + 'http.listenIF', + 'http.port', + 'http.allowPatchForDelete', + 'http.timeout', + 'webui.root', + 'http.cors.enable', + 'http.cors.resources', + 'http.wsgi.enable', + 'http.wsgi.threadPoolSize', + 'http.wsgi.connectionLimit', + 'http.security.enableBasicAuth', + 'http.security.enableTokenAuth', + 'mqtt.security.password' + ): + return + self._assignConfig() + + def run(self) -> bool: """ Run the http server in a separate thread. 
""" @@ -205,12 +263,21 @@ def _run(self) -> None: cli.show_server_banner = lambda *x: None # type: ignore # Start the server try: - self.flaskApp.run(host = self.listenIF, - port = self.port, - threaded = True, - request_handler = ACMERequestHandler, - ssl_context = CSE.security.getSSLContext(), - debug = False) + if self.wsgiEnable: + L.isInfo and L.log(f'HTTP server listening on {self.listenIF}:{self.port} (wsgi)') + serve(self.flaskApp, + host = self.listenIF, + port = self.port, + threads = self.wsgiThreadPoolSize, + connection_limit = self.wsgiConnectionLimit) + else: + L.isInfo and L.log(f'HTTP server listening on {self.listenIF}:{self.port} (flask http)') + self.flaskApp.run(host = self.listenIF, + port = self.port, + threaded = True, + request_handler = ACMERequestHandler, + ssl_context = CSE.security.getSSLContext(), + debug = False) except Exception as e: # No logging for headless, nevertheless print the reason what happened if CSE.isHeadless: @@ -277,12 +344,16 @@ def _handleRequest(self, path:str, operation:Operation) -> Response: def handleGET(self, path:Optional[str] = None) -> Response: + if not self.handleAuthentication(): + return Response(status = 401) renameThread('HTRE') self._eventHttpRetrieve() return self._handleRequest(path, Operation.RETRIEVE) def handlePOST(self, path:Optional[str] = None) -> Response: + if not self.handleAuthentication(): + return Response(status = 401) if self._hasContentType(): renameThread('HTCR') self._eventHttpCreate() @@ -294,12 +365,16 @@ def handlePOST(self, path:Optional[str] = None) -> Response: def handlePUT(self, path:Optional[str] = None) -> Response: + if not self.handleAuthentication(): + return Response(status = 401) renameThread('HTUP') self._eventHttpUpdate() return self._handleRequest(path, Operation.UPDATE) def handleDELETE(self, path:Optional[str] = None) -> Response: + if not self.handleAuthentication(): + return Response(status = 401) renameThread('HTDE') self._eventHttpDelete() return self._handleRequest(path, Operation.DELETE) @@ -308,6 +383,8 @@ def handleDELETE(self, path:Optional[str] = None) -> Response: def handlePATCH(self, path:Optional[str] = None) -> Response: """ Support instead of DELETE for http/1.0. """ + if not self.handleAuthentication(): + return Response(status = 401) if request.environ.get('SERVER_PROTOCOL') != 'HTTP/1.0': return Response(L.logWarn('PATCH method is only allowed for HTTP/1.0. Rejected.'), status = 405) renameThread('HTDE') @@ -350,6 +427,11 @@ def handleUpperTester(self, path:Optional[str] = None) -> Response: if self.isStopped: return Response('Service not available', status = 503) + # Check, when authentication is enabled, the user is authorized, else return status 401 + if not self.handleAuthentication(): + return Response(status = 401) + + def prepareUTResponse(rcs:ResponseStatusCode, result:str) -> Response: """ Prepare the Upper Tester Response. """ @@ -541,6 +623,47 @@ def sendHttpRequest(self, request:CSERequest, url:str) -> Result: self._eventResponseReceived(resp) return res + ######################################################################### + + # + # Handle authentication + # + + def handleAuthentication(self) -> bool: + """ Handle the authentication for the current request. + + Return: + True if the request is authenticated, False otherwise. 
+ """ + if not (self.enableBasicAuth or self.enableTokenAuth): + return True + + if (authorization := request.authorization) is None: + L.isDebug and L.logDebug('No authorization header found.') + return False + + match authorization.type: + case 'basic': + return self._handleBasicAuthentication(authorization.parameters) + case 'bearer': + return self._handleTokenAuthentication(authorization.token) + case _: + L.isWarn and L.logWarn(f'Unsupported authentication method: {authorization.type}') + return False + + + def _handleBasicAuthentication(self, parameters:dict) -> bool: + if not CSE.security.validateHttpBasicAuth(parameters['username'], parameters['password']): + L.isWarn and L.logWarn(f'Invalid username or password for basic authentication: {parameters["username"]}') + return False + return True + + + def _handleTokenAuthentication(self, token:str) -> bool: + if not CSE.security.validateHttpTokenAuth(token): + L.isWarn and L.logWarn(f'Invalid token for token authentication: {token}') + return False + return True ######################################################################### @@ -573,6 +696,7 @@ def _prepareResponse(self, result:Result, result.request.rvi = originalRequest.rvi result.request.vsi = originalRequest.vsi result.request.ec = originalRequest.ec + result.request.rset = originalRequest.rset # # Transform request to oneM2M request @@ -596,6 +720,8 @@ def _prepareResponse(self, result:Result, headers[Constants().hfRVI] = rvi if vsi := findXPath(cast(JSON, outResult.data), 'vsi'): headers[Constants().hfVSI] = vsi + if rset := findXPath(cast(JSON, outResult.data), 'rset'): + headers[Constants().hfRST] = rset headers[Constants().hfOT] = getResourceDate() # HTTP status code @@ -659,10 +785,11 @@ def extractMultipleArgs(args:MultiDict, argName:str) -> None: req['op'] = operation.value # Needed later for validation # resolve http's /~ and /_ special prefixs - if path[0] == '~': - path = path[1:] # ~/xxx -> /xxx - elif path[0] == '_': - path = f'/{path[1:]}' # _/xxx -> //xxx + match path[0]: + case '~': + path = path[1:] # ~/xxx -> /xxx + case '_': + path = f'/{path[1:]}' # _/xxx -> //xxx req['to'] = path @@ -709,7 +836,6 @@ def extractMultipleArgs(args:MultiDict, argName:str) -> None: # parse accept header cseRequest.httpAccept = [ a for a in _headers.getlist('accept') if a != '*/*' ] - cseRequest.originalHttpArgs = deepcopy(request.args) # Keep the original args # copy request arguments for greedy attributes checking _args = request.args.copy() # type: ignore [no-untyped-call] diff --git a/acme/services/Importer.py b/acme/services/Importer.py index 31d8d2e8..5c107b18 100644 --- a/acme/services/Importer.py +++ b/acme/services/Importer.py @@ -11,7 +11,7 @@ """ Import various resources, scripts, policies etc into the CSE. """ from __future__ import annotations -from typing import cast, Sequence, Optional +from typing import cast, Sequence, Optional, Tuple import json, os, fnmatch, re from copy import deepcopy @@ -37,6 +37,7 @@ class Importer(object): __slots__ = ( 'resourcePath', + 'extendedResourcePath', 'macroMatch', 'isImporting', @@ -46,12 +47,13 @@ class Importer(object): # List of "priority" resources that must be imported first for correct CSE operation _firstImporters = [ 'csebase.json'] - _enumValues:dict[str, list[int]] = {} + _enumValues:dict[str, dict[int, str]] = {} def __init__(self) -> None: """ Initialization of an *Importer* instance. 
""" self.resourcePath = Configuration.get('cse.resourcesPath') + self.extendedResourcePath = None self.macroMatch = re.compile(r"\$\{[\w.]+\}") self.isImporting = False L.isInfo and L.log('Importer initialized') @@ -95,7 +97,7 @@ def removeImports(self) -> None: # Scripts # - def importScripts(self, path:Optional[str] = None) -> bool: + def importScripts(self, path:Optional[str|list[str]] = None) -> bool: """ Import the ACME script from a directory. Args: @@ -110,6 +112,12 @@ def importScripts(self, path:Optional[str] = None) -> bool: if (path := self.resourcePath) is None: L.logErr('cse.resourcesPath not set') raise RuntimeError('cse.resourcesPath not set') + path = [ path ] + for _e in os.scandir(self.resourcePath): + if _e.is_dir() and _e.name.endswith('.scripts'): + path.append(_e.path) + self.extendedResourcePath = path # save for later use + self._prepareImporting() try: L.isInfo and L.log(f'Importing scripts from directory(s): {path}') @@ -117,21 +125,21 @@ def importScripts(self, path:Optional[str] = None) -> bool: return False # Check that there is only one startup script, then execute it - if len(scripts := CSE.script.findScripts(meta = _metaInit)) > 1: - L.logErr(f'Only one initialization script allowed. Found: {",".join([ s.scriptName for s in scripts ])}') - return False - - elif len(scripts) == 1: - # Check whether there is already a filled DB, then skip the imports - if CSE.dispatcher.countResources() > 0: - L.isInfo and L.log('Resources already imported, skipping boostrap') - else: - # Run the startup script. There shall only be one. - s = scripts[0] - L.isInfo and L.log(f'Running boostrap script: {s.scriptName}') - if not CSE.script.runScript(s): - L.logErr(f'Error during startup: {s.error}') - return False + match len(scripts := CSE.script.findScripts(meta = _metaInit)): + case l if l > 1: + L.logErr(f'Only one initialization script allowed. Found: {",".join([ s.scriptName for s in scripts ])}') + return False + case 1: + # Check whether there is already a filled DB, then skip the imports + if CSE.dispatcher.countResources() > 0: + L.isInfo and L.log('Resources already imported, skipping boostrap') + else: + # Run the startup script. There shall only be one. 
+ s = scripts[0] + L.isInfo and L.log(f'Running boostrap script: {s.scriptName}') + if not CSE.script.runScript(s): + L.logErr(f'Error during startup: {s.error}') + return False finally: # This is executed no matter whether the code above returned or just succeeded self._finishImporting() @@ -243,10 +251,38 @@ def importEnumPolicies(self, path:Optional[str] = None) -> bool: return False for enumName, enumDef in enums.items(): - if not (evalues := enumDef.get('evalues')): - L.logErr(f'Missing or empty enumeration values (evalues) in file: {fn}') + if not isinstance(enumDef, dict): + L.logErr(f'Wrong or empty enumeration definition for enum: {enumName} in file: {fn}') return False - self._enumValues[enumName] = self._expandEnumValues(evalues, enumName, fn) + + enm:dict[int, str] = {} + for enumValue, enumInterpretation in enumDef.items(): + s, found, e = enumValue.partition('..') + if not found: + # Single value + try: + value = int(enumValue) + except ValueError: + L.logErr(f'Wrong enumeration value: {enumValue} in enum: {enumName} in file: {fn} (must be an integer)') + return False + if not isinstance(enumInterpretation, str): + L.logErr(f'Wrong interpretation for enum value: {enumValue} in enum: {enumName} in file: {fn}') + return False + enm[value] = enumInterpretation + + else: + # Range + try: + si = int(s) + ei = int(e) + except ValueError: + L.logErr(f'Error in evalue range definition: {enumValue} (range shall consist of integer numbers) for enum attribute: {enumName} in file: {fn}', showStackTrace=False) + return None + for i in range(si, ei+1): + enm[i] = enumInterpretation + + self._enumValues[enumName] = enm + return True @@ -387,21 +423,22 @@ def importAttributePolicies(self, path:Optional[str] = None) -> bool: # Check whether there is an unresolved type used in any of the attributes (in the type and listType) # TODO ? The following can be optimized sometimes, but since it is only called once during startup the small overhead may be neglectable. 
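+
+		# Illustrative example of the enumeration definition format parsed by importEnumPolicies()
+		# above: each key is either a single integer or an inclusive "<from>..<to>" range, and each
+		# value is the interpretation string. The enum name and interpretations below are
+		# placeholders, not definitions from the specification.
+		#
+		#   { "exampleStatus": { "1": "ok", "2": "error", "10..12": "reserved" } }
+		#
+		# would be expanded to: self._enumValues['exampleStatus'] ==
+		#   { 1: 'ok', 2: 'error', 10: 'reserved', 11: 'reserved', 12: 'reserved' }
+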
for p in CSE.validator.getAllAttributePolicies().values(): - if p.type == BasicType.complex: - for each in CSE.validator.getAllAttributePolicies().values(): - if p.typeName == each.ctype: # found a definition - break - else: - L.logErr(f'No type or complex type definition found: {p.typeName} for attribute: {p.sname} in file: {p.fname}', showStackTrace = False) - return False - elif p.type == BasicType.list and p.ltype is not None: - if p.ltype == BasicType.complex: + match p.type: + case BasicType.complex: for each in CSE.validator.getAllAttributePolicies().values(): - if p.lTypeName == each.ctype: # found a definition + if p.typeName == each.ctype: # found a definition break else: - L.logErr(f'No list sub-type definition found: {p.lTypeName} for attribute: {p.sname} in file: {p.fname}', showStackTrace = False) - return False + L.logErr(f'No type or complex type definition found: {p.typeName} for attribute: {p.sname} in file: {p.fname}', showStackTrace = False) + return False + case BasicType.list if p.ltype is not None: + if p.ltype == BasicType.complex: + for each in CSE.validator.getAllAttributePolicies().values(): + if p.lTypeName == each.ctype: # found a definition + break + else: + L.logErr(f'No list sub-type definition found: {p.lTypeName} for attribute: {p.sname} in file: {p.fname}', showStackTrace = False) + return False L.isDebug and L.logDebug(f'Imported {countAP} attribute policies') return True @@ -517,6 +554,8 @@ def _parseAttribute(self, attr:JSON, # Check and determine the list type lTypeName:str = None ltype:BasicType = None + etype:str = None + evalues:dict[int, str] = None if checkListType: # TODO remove this when flexContainer definitions support list sub-types if lTypeName := findXPath(attr, 'ltype'): if not isinstance(lTypeName, str) or len(lTypeName) == 0: @@ -528,15 +567,14 @@ def _parseAttribute(self, attr:JSON, if not (ltype := BasicType.to(lTypeName)): # automatically a complex type if not found in the type definition. Check for this happens later ltype = BasicType.complex if ltype == BasicType.enum: # check sub-type enums - evalues:Sequence[int|str] if (etype := findXPath(attr, 'etype')): # Get the values indirectly from the enums read above evalues = self._enumValues.get(etype) else: - evalues = findXPath(attr, 'evalues') - if not evalues or not isinstance(evalues, list): + evalues = findXPath(attr, 'evalues') # TODO? + if not evalues or not isinstance(evalues, dict): L.logErr(f'Missing, wrong of empty enum values (evalue) list for attribute: {tpe} in file: {fn}', showStackTrace=False) return None - evalues = self._expandEnumValues(evalues, tpe, fn) + # evalues = self._expandEnumValues(evalues, tpe, fn) # TODO this is perhaps wrong, bc we changed the evalue handling to a different format if typ == BasicType.list and lTypeName is None: L.isDebug and L.logDebug(f'Missing list type for attribute: {tpe} in file: {fn}') @@ -546,11 +584,11 @@ def _parseAttribute(self, attr:JSON, if (etype := findXPath(attr, 'etype')): # Get the values indirectly from the enums read above evalues = self._enumValues.get(etype) else: - evalues = findXPath(attr, 'evalues') - if not evalues or not isinstance(evalues, list): + evalues = findXPath(attr, 'evalues') # TODO? 
+ if not evalues or not isinstance(evalues, dict): L.logErr(f'Missing, wrong of empty enum values (evalue) list for attribute: {tpe} etype: {etype} in file: {fn}', showStackTrace=False) return None - evalues = self._expandEnumValues(evalues, tpe, fn) + # evalues = self._expandEnumValues(evalues, tpe, fn) # Check missing complex type definition if typ == BasicType.dict or ltype == BasicType.dict: @@ -582,6 +620,7 @@ def _parseAttribute(self, attr:JSON, ctype = ctype, fname = fn, ltype = ltype, + etype = etype, lTypeName = lTypeName, evalues = evalues ) diff --git a/acme/services/LocationManager.py b/acme/services/LocationManager.py new file mode 100644 index 00000000..2831fdcf --- /dev/null +++ b/acme/services/LocationManager.py @@ -0,0 +1,372 @@ +# +# LocationManager.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. See the LICENSE file for further details. +# + +""" This module implements location service and helper functions. +""" + +from __future__ import annotations + +from typing import Tuple, Optional, Literal +from dataclasses import dataclass +import json + +from ..helpers.BackgroundWorker import BackgroundWorkerPool, BackgroundWorker +from ..etc.Types import LocationInformationType, LocationSource, GeofenceEventCriteria, ResourceTypes, GeometryType, GeoSpatialFunctionType +from ..etc.DateUtils import fromDuration +from ..etc.GeoTools import getGeoPoint, getGeoPolygon, isLocationInsidePolygon, geoWithin, geoContains, geoIntersects +from ..etc.ResponseStatusCodes import BAD_REQUEST +from ..services.Logging import Logging as L +from ..services import CSE +from ..resources.LCP import LCP +from ..resources.CIN import CIN +from ..resources import Factory +from ..resources.Resource import Resource + +GeofencePositionType = Literal[GeofenceEventCriteria.Inside, GeofenceEventCriteria.Outside] +""" Type alias for the geofence position.""" + +LocationType = Tuple[float, float] +""" Type alias for the location type.""" + +@dataclass +class LocationInformation(object): + """ Location information for a location policy. + """ + worker:BackgroundWorker = None + """ The worker for the location policy. """ + location:Optional[LocationType] = None + """ The current location. """ + targetArea:Optional[list[LocationType]] = None + """ The polygon. """ + geofencePosition:GeofencePositionType = GeofenceEventCriteria.Inside + """ The current position type (inside, outside). """ + eventCriteria:GeofenceEventCriteria = GeofenceEventCriteria.Inside + """ The event criteria. """ + locationContainerID:Optional[str] = None + """ The location container resource ID. """ + + +class LocationManager(object): + """ The LocationManager class implements the location service and helper functions. + + Attributes: + locationPolicyWorkers: A dictionary of location policy workers + """ + + __slots__ = ( + 'locationPolicyInfos', + 'deviceDefaultPosition' + ) + + + def __init__(self) -> None: + """ Initialization of the LocationManager module. + """ + + self.locationPolicyInfos:dict[str, LocationInformation] = {} + + self.deviceDefaultPosition:GeofencePositionType = GeofenceEventCriteria.Inside # Default event criteria + # Add a handler when the CSE is reset + CSE.event.addHandler(CSE.event.cseReset, self.restart) # type: ignore + L.isInfo and L.log('LocationManager initialized') + + +# TODO rebuild the list of location policies when the CSE is reset or started. OR create a DB + + def shutdown(self) -> bool: + """ Shutdown the LocationManager. 
+ + Returns: + Boolean that indicates the success of the operation + """ + L.isInfo and L.log('LocationManager shut down') + return True + + + def restart(self, name:str) -> None: + """ Restart the LocationManager. + """ + L.isDebug and L.logDebug('LocationManager restarted') + + + ######################################################################### + + def addLocationPolicy(self, lcp:LCP) -> None: + """ Add a location policy. + + Args: + lcp: The location policy to add. + """ + L.isDebug and L.logDebug('Adding location policy') + lcpRi = lcp.ri + gta = getGeoPolygon(lcp.gta) + loi = lcp.loi + + # Remove first if already running + if lcpRi in self.locationPolicyInfos: + self.removeLocationPolicy(lcp) + + # Check whether the location source is device based (only one supported right now) + if lcp.los != LocationSource.Device_based: + L.isDebug and L.logDebug('Only device based location source supported') + return # Not supported + + # Add an empty entry first. + self.locationPolicyInfos[lcpRi] = LocationInformation(targetArea = gta, + geofencePosition = self.deviceDefaultPosition, + eventCriteria = lcp.gec, + locationContainerID = loi) + + # Check if the location information type / position is fixed + if (lit := lcp.lit) is None or lit == LocationInformationType.Position_fix: + L.isDebug and L.logDebug('Location information type not set or position fix. Ignored.') + return # No updates needed + + # Get the periodicity + if (lou := lcp.lou) is None or len(lou) == 0: # locationUpdatePeriodicity + L.isDebug and L.logDebug('Location update periodicity not set. Ignored.') + return # No updates needed. Checks are done when the location is requested via + if (_lou := fromDuration(lou[0], False)) == 0.0: # just take the first duration + L.isDebug and L.logDebug('Location update periodicity is 0. Ignored.') + return + + # Create a worker + L.isDebug and L.logDebug(f'Starting location policy worker for: {lcpRi} Intervall: {_lou}') + self.locationPolicyInfos[lcpRi] = LocationInformation(worker = BackgroundWorkerPool.newWorker(interval = _lou, + workerCallback = self.locationWorker, + name = f'lcp_{lcp.ri}', + startWithDelay = True).start(lcpRi = lcpRi), + targetArea = gta, + geofencePosition = self.deviceDefaultPosition, + eventCriteria = lcp.gec, + locationContainerID = loi + ) + # # Immediately update the location + # self.getNewLocation(lcpRi) + + + + def removeLocationPolicy(self, lcp:LCP) -> None: + """ Remove a location policy. This will stop the worker and remove the LCP from the internal list. + + Args: + lcp: The LCP to remove. + """ + L.isDebug and L.logDebug('Removing location policy') + + # Stopping the worker and remove the LCP from the internal list + if (ri := lcp.ri) in self.locationPolicyInfos: + L.isDebug and L.logDebug('Stopping location policy worker') + if (worker := self.locationPolicyInfos[ri].worker) is not None: + worker.stop() + del self.locationPolicyInfos[ri] + + + def updateLocationPolicy(self, lcp:LCP) -> None: + """ Update a location policy. This will remove the old location policy and add a new one. + """ + L.isDebug and L.logDebug('Updating location policy') + self.removeLocationPolicy(lcp) + self.addLocationPolicy(lcp) + + + def handleLatestRetrieve(self, latest:CIN, lcpRi:str) -> None: + """ Handle a latest RETRIEVE request for a CNT with a location policy. 
+
+			Args:
+				latest: The latest CIN
+				lcpRi: The location policy resource ID
+		"""
+		if lcpRi is None:
+			return
+
+		# Check if the location policy is supported
+		if (lcp := CSE.dispatcher.retrieveResource(lcpRi)) is not None:
+			if lcp.los == LocationSource.Network_based and lcp.lou is not None and lcp.lou == 0:
+				L.isDebug and L.logDebug(f'Handling latest RETRIEVE for CNT with locationID: {lcpRi}')
+				# Handle Network based location source
+				# NOT SUPPORTED YET
+				L.isWarn and L.logWarn('Network-based location source not supported yet')
+
+			if (lit := lcp.lit) is None or lit == LocationInformationType.Position_fix:
+				L.isDebug and L.logDebug('Location information type not set or position fix. Ignored.')
+				return	# No updates needed
+
+
+		if (locations := self.getNewLocation(lcpRi, content = latest.con)) is None:
+			return
+
+		# check if the location is inside the polygon and update the location event
+		self.updateLocationEvent(locations[0], locations[1], lcpRi)
+
+		# TODO do something with the result
+
+
+	def locationWorker(self, lcpRi:str) -> bool:
+		""" Worker function for location policies. This will be called periodically to update the location.
+
+			Args:
+				lcpRi: The resource ID of the location policy
+
+			Returns:
+				True if the worker should be continued, False otherwise.
+		"""
+
+		if (locations := self.getNewLocation(lcpRi)) is None:
+			return True	# something went wrong, but still continue
+
+		self.updateLocationEvent(locations[0], locations[1], lcpRi)
+
+		return True
+
+
+
+
+	#########################################################################
+
+
+
+	def getNewLocation(self, lcpRi:str, content:Optional[str] = None) -> Optional[Tuple[LocationType, LocationType]]:
+		""" Get the new location for a location policy. Also, update the internal policy info if necessary.
+
+			Args:
+				lcpRi: The resource ID of the location policy
+				content: The content of the latest CIN of the location policy's container resource
+
+			Returns:
+				The new and old locations as a tuple of (latitude, longitude), or None if the location is invalid or not found
+		"""
+
+		# Get the location policy info
+		if (info := self.locationPolicyInfos.get(lcpRi)) is None:
+			L.isWarn and L.logWarn(f'Internal location policy info for: {lcpRi} not found')
+			return None
+
+		# Get the content if not provided
+		if not content:
+			# Get the location from a location instance
+			if not (cin := CSE.dispatcher.retrieveLatestOldestInstance(info.locationContainerID, ResourceTypes.CIN)):
+				return None	# No resource found, still continue
+			content = cin.con
+
+		# Check whether the content is a valid location or an event
+		if content in ('', '1', '2', '3', '4'):	# This could be done better...
+			return None	# An event, so return
+
+		# From here on, content is a location
+		if (newLocation := getGeoPoint(content)) is None:
+			L.isWarn and L.logWarn(f'Invalid location: {content}. Must be a valid GeoPoint')
+			return None	# Invalid location, nothing to report
+
+		# Check if the location has changed, or there was no location before
+		oldLocation = info.location
+		if oldLocation != newLocation:
+			# Update the location in the location policy
+			self.locationPolicyInfos[lcpRi].location = newLocation
+
+		return (newLocation, oldLocation)
+
+
+	def updateLocationEvent(self, newLocation:LocationType, oldLocation:LocationType, lcpRi:str) -> None:
+		""" Update the location event for a location policy if the location has changed and/or the event criteria is met.
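+
+			Note: with the dispatch below, an *Entering* event is generated on an Outside -> Inside
+			transition, a *Leaving* event on an Inside -> Outside transition, an *Inside* event while
+			the position stays inside, and an *Outside* event while it stays outside, in each case
+			only if that event type is also the policy's configured event criteria. All other
+			combinations generate no event.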
+ + Args: + newLocation: The new location + oldLocation: The old location + lcpRi: The resource ID of the location policy + """ + + def addEventContentInstance(info:LocationInformation, eventType:GeofenceEventCriteria) -> None: + """ Add a new event content instance to the location policy's container resource. + + Args: + info: The location policy info + eventType: The type of the event + """ + L.isDebug and L.logDebug(f'Position: {eventType}') + cnt = CSE.dispatcher.retrieveResource(info.locationContainerID) + cin = Factory.resourceFromDict({ 'con': f'{eventType.value}' }, + pi = info.locationContainerID, + ty = ResourceTypes.CIN) + CSE.dispatcher.createLocalResource(cin, cnt) + + + if (info := self.locationPolicyInfos.get(lcpRi)) is None: + L.isWarn and L.logWarn(f'Internal location policy info for: {lcpRi} not found') + return + previousGeofencePosition = info.geofencePosition + currentGeofencePosition = self.checkGeofence(lcpRi, newLocation) + + match currentGeofencePosition: + case GeofenceEventCriteria.Inside if previousGeofencePosition == GeofenceEventCriteria.Outside and info.eventCriteria == GeofenceEventCriteria.Entering: + # Entering + addEventContentInstance(info, GeofenceEventCriteria.Entering) + case GeofenceEventCriteria.Outside if previousGeofencePosition == GeofenceEventCriteria.Inside and info.eventCriteria == GeofenceEventCriteria.Leaving: + # Leaving + addEventContentInstance(info, GeofenceEventCriteria.Leaving) + case GeofenceEventCriteria.Inside if previousGeofencePosition == GeofenceEventCriteria.Inside and info.eventCriteria == GeofenceEventCriteria.Inside: + # Inside + addEventContentInstance(info, GeofenceEventCriteria.Inside) + case GeofenceEventCriteria.Outside if previousGeofencePosition == GeofenceEventCriteria.Outside and info.eventCriteria == GeofenceEventCriteria.Outside: + # Outside + addEventContentInstance(info, GeofenceEventCriteria.Outside) + case _: + # No event + L.isDebug and L.logDebug(f'No event for: {previousGeofencePosition} -> {currentGeofencePosition} and event criteria: {GeofenceEventCriteria(info.eventCriteria)}') + + # update the geofence position + info.geofencePosition = currentGeofencePosition + info.location = newLocation + + + def checkGeofence(self, lcpRi:str, location:tuple[float, float]) -> GeofencePositionType: + """ Check if a location is inside or outside the polygon of a location policy. + + Args: + lcpRi: The resource ID of the location policy + location: The location to check + + Returns: + The geofence position of the location. Either *inside* or *outside*. + """ + result = GeofenceEventCriteria.Inside if isLocationInsidePolygon(self.locationPolicyInfos[lcpRi].targetArea, location) else GeofenceEventCriteria.Outside + # L.isDebug and L.logDebug(f'Location is: {result}') + return result # type:ignore [return-value] + + + ######################################################################### + # + # GeoLocation and GeoQuery + # + + def checkGeoLocation(self, r:Resource, gmty:GeometryType, geom:list, gsf:GeoSpatialFunctionType) -> bool: + """ Check if a resource's location confirms to a geo location. + + Args: + r: The resource to check. + gmty: The geometry type. + geom: The geometry. + gsf: The geo spatial function. + + Returns: + True if the resource's location confirms to the geo location, False otherwise. 
+ """ + if (rGeom := r.getLocationCoordinates()) is None: + return False + rTyp = r.loc.get('typ') + + try: + match gsf: + case GeoSpatialFunctionType.Within: + return geoWithin(gmty, geom, rTyp, rGeom) + case GeoSpatialFunctionType.Contains: + return geoContains(gmty, geom, rTyp, rGeom) + case GeoSpatialFunctionType.Intersects: + return geoIntersects(gmty, geom, rTyp, rGeom) + case _: + raise ValueError(f'Invalid geo spatial function: {gsf}') + except ValueError as e: + raise BAD_REQUEST(L.logDebug(f'Invalid geometry: {e}')) diff --git a/acme/services/Logging.py b/acme/services/Logging.py index 0c3866f4..79391634 100644 --- a/acme/services/Logging.py +++ b/acme/services/Logging.py @@ -126,6 +126,7 @@ class Logging: enableQueue = False # Can be used to enable/disable the logging queue queueSize:int = 0 # max number of items in the logging queue. Might otherwise grow forever on large load filterSources:tuple[str, ...] = () # List of log sources that will be removed while processing the log messages + maxLogMessageLength:int = 0 # Max length of a log message. Longer messages will be truncated _console:Console = None _richHandler:ACMERichLogHandler = None @@ -156,6 +157,7 @@ def init() -> None: Logging.enableBindingsLogging = Configuration.get('logging.enableBindingsLogging') Logging.queueSize = Configuration.get('logging.queueSize') Logging.filterSources = tuple(Configuration.get('logging.filter')) + Logging.maxLogMessageLength = Configuration.get('logging.maxLogMessageLength') Logging._configureColors(Configuration.get('console.theme')) @@ -379,15 +381,17 @@ def logWithLevel(level:int, msg:Any, """ # TODO add a parameter frame substractor to correct the line number, here and in In _log() # TODO change to match in Python10 - if level == logging.DEBUG: - return Logging.logDebug(msg, stackOffset = stackOffset) - elif level == logging.INFO: - return Logging.log(msg, stackOffset = stackOffset) - elif level == logging.WARNING: - return Logging.logWarn(msg, stackOffset = stackOffset) - elif level == logging.ERROR: - return Logging.logErr(msg, showStackTrace = showStackTrace, stackOffset = stackOffset) - return msg + match level: + case logging.DEBUG: + return Logging.logDebug(msg, stackOffset = stackOffset) + case logging.INFO: + return Logging.log(msg, stackOffset = stackOffset) + case logging.WARNING: + return Logging.logWarn(msg, stackOffset = stackOffset) + case logging.ERROR: + return Logging.logErr(msg, showStackTrace = showStackTrace, stackOffset = stackOffset) + case _: + return msg @staticmethod @@ -425,6 +429,7 @@ def _log(level:int, msg:Any, stackOffset:Optional[int] = 0, immediate:Optional[b # Queue a log message : (level, message, caller from stackframe, current thread) caller = inspect.getframeinfo(inspect.stack()[stackOffset + 2][0]) thread = threading.current_thread() + msg = msg[:Logging.maxLogMessageLength] if Logging.maxLogMessageLength else msg # truncate message if necessary if Logging.enableQueue and not immediate: Logging.queue.put((level, msg, caller, thread)) else: @@ -454,14 +459,15 @@ def console(msg:Union[str, Text, Tree, Table, JSON] = ' ', style = Logging.terminalStyle if not isError else Logging.terminalStyleError if nlb: # Empty line before Logging._console.print() - if isinstance(msg, str): - Logging._console.print(msg if plain else Markdown(msg), style = style, end = end, highlight = False) - elif isinstance(msg, dict): - Logging._console.print(msg, style = style, end = end) - elif isinstance(msg, (Tree, Table, Text)): - Logging._console.print(msg, style = style, 
end = end) - else: - Logging._console.print(str(msg), style = style, end = end) + + match msg: + case str(): + Logging._console.print(msg if plain else Markdown(msg), style = style, end = end, highlight = False) + case dict() | Tree() | Table() | Text(): + Logging._console.print(msg, style = style, end = end) + case _: + Logging._console.print(str(msg), style = style, end = end) + if nl: # Empty line after Logging._console.print() diff --git a/acme/services/NotificationManager.py b/acme/services/NotificationManager.py index c53fc9e7..b7b0079d 100644 --- a/acme/services/NotificationManager.py +++ b/acme/services/NotificationManager.py @@ -22,8 +22,8 @@ from ..etc.ResponseStatusCodes import ResponseStatusCode, ResponseException, exceptionFromRSC from ..etc.ResponseStatusCodes import INTERNAL_SERVER_ERROR, SUBSCRIPTION_VERIFICATION_INITIATION_FAILED from ..etc.ResponseStatusCodes import TARGET_NOT_REACHABLE, REMOTE_ENTITY_NOT_REACHABLE, OPERATION_NOT_ALLOWED -from ..etc.ResponseStatusCodes import OPERATION_DENIED_BY_REMOTE_ENTITY -from ..etc.DateUtils import fromDuration, getResourceDate +from ..etc.ResponseStatusCodes import OPERATION_DENIED_BY_REMOTE_ENTITY, NOT_FOUND +from ..etc.DateUtils import fromDuration, getResourceDate, cronMatchesTimestamp, utcDatetime from ..etc.Utils import toSPRelative, pureResource, isAcmeUrl, compareIDs from ..helpers.TextTools import setXPath, findXPath from ..services import CSE @@ -179,8 +179,11 @@ def removeSubscription(self, subscription:SUB|CRS, originator:str) -> None: self.sendDeletionNotification([ nu for nu in acrs ], subscription.ri) # Finally remove subscriptions from storage - if not CSE.storage.removeSubscription(subscription): - raise INTERNAL_SERVER_ERROR('cannot remove subscription from database') + try: + if not CSE.storage.removeSubscription(subscription): + raise INTERNAL_SERVER_ERROR('cannot remove subscription from database') + except NOT_FOUND: + pass # ignore, could be expected def updateSubscription(self, subscription:SUB, previousNus:list[str], originator:str) -> None: @@ -232,7 +235,7 @@ def getSubscriptionsByNetChty(self, ri:str, def checkSubscriptions( self, - resource:Resource, + resource:Optional[Resource], reason:NotificationEventType, childResource:Optional[Resource] = None, modifiedAttributes:Optional[JSON] = None, @@ -276,74 +279,88 @@ def checkSubscriptions( self, # TODO ensure uniqueness subs.append(sub) + for sub in subs: + if reason not in sub['net']: # check whether reason is actually included in the subscription + continue - # TODO: Add access control check here. 
Perhaps then the special subscription - # DB data structure should go away and be replaced by the normal subscriptions - - - for sub in subs: # Prevent own notifications for subscriptions ri = sub['ri'] + + # Check whether reason is included in the subscription if childResource and \ ri == childResource.ri and \ reason in [ NotificationEventType.createDirectChild, NotificationEventType.deleteDirectChild ]: continue - if reason not in sub['net']: # check whether reason is actually included in the subscription - continue - if reason in [ NotificationEventType.createDirectChild, NotificationEventType.deleteDirectChild ]: # reasons for child resources - chty = sub['chty'] - if chty and not childResource.ty in chty: # skip if chty is set and child.type is not in the list - continue - self._handleSubscriptionNotification(sub, - reason, - resource = childResource, - modifiedAttributes = modifiedAttributes, - asynchronous = self.asyncSubscriptionNotifications) - self.countNotificationEvents(ri) - - # Check Update and enc/atr vs the modified attributes - elif reason == NotificationEventType.resourceUpdate and (atr := sub['atr']) and modifiedAttributes: - found = False - for k in atr: - if k in modifiedAttributes: - found = True - if found: + + # Check the subscription's schedule, but only if it is not an immediate notification + if not ((nec := sub['nec']) and nec == EventCategory.Immediate): + if (_sc := CSE.storage.searchScheduleForTarget(ri)): + _ts = utcDatetime() + + # Check whether the current time matches the schedule + for s in _sc: + if cronMatchesTimestamp(s, _ts): + break + else: + # No schedule matches the current time, so continue with the next subscription + continue + + match reason: + case NotificationEventType.createDirectChild | NotificationEventType.deleteDirectChild: # reasons for child resources + chty = sub['chty'] + if chty and not childResource.ty in chty: # skip if chty is set and child.type is not in the list + continue self._handleSubscriptionNotification(sub, reason, - resource = resource, - modifiedAttributes = modifiedAttributes, + resource = childResource, + modifiedAttributes = modifiedAttributes, asynchronous = self.asyncSubscriptionNotifications) self.countNotificationEvents(ri) - else: - L.isDebug and L.logDebug('Skipping notification: No matching attributes found') - # Check for missing data points (only for ) - elif reason == NotificationEventType.reportOnGeneratedMissingDataPoints and missingData: - md = missingData[sub['ri']] - if md.missingDataCurrentNr >= md.missingDataNumber: # Always send missing data if the count is greater then the minimum number + # Check Update and enc/atr vs the modified attributes + case NotificationEventType.resourceUpdate if (atr := sub['atr']) and modifiedAttributes: + found = False + for k in atr: + if k in modifiedAttributes: + found = True + if found: # any one found + self._handleSubscriptionNotification(sub, + reason, + resource = resource, + modifiedAttributes = modifiedAttributes, + asynchronous = self.asyncSubscriptionNotifications) + self.countNotificationEvents(ri) + else: + L.isDebug and L.logDebug('Skipping notification: No matching attributes found') + + # Check for missing data points (only for ) + case NotificationEventType.reportOnGeneratedMissingDataPoints if missingData: + md = missingData[sub['ri']] + if md.missingDataCurrentNr >= md.missingDataNumber: # Always send missing data if the count is greater then the minimum number + self._handleSubscriptionNotification(sub, + 
NotificationEventType.reportOnGeneratedMissingDataPoints, + missingData = copy.deepcopy(md), + asynchronous = self.asyncSubscriptionNotifications) + self.countNotificationEvents(ri) + md.clearMissingDataList() + + case NotificationEventType.blockingUpdate | NotificationEventType.blockingRetrieve | NotificationEventType.blockingRetrieveDirectChild: self._handleSubscriptionNotification(sub, - NotificationEventType.reportOnGeneratedMissingDataPoints, - missingData = copy.deepcopy(md), - asynchronous = self.asyncSubscriptionNotifications) + reason, + resource, + modifiedAttributes = modifiedAttributes, + asynchronous = False) # blocking NET always synchronous! self.countNotificationEvents(ri) - md.clearMissingDataList() - elif reason in [NotificationEventType.blockingUpdate, NotificationEventType.blockingRetrieve, NotificationEventType.blockingRetrieveDirectChild]: - self._handleSubscriptionNotification(sub, - reason, - resource, - modifiedAttributes = modifiedAttributes, - asynchronous = False) # blocking NET always synchronous! - self.countNotificationEvents(ri) - - else: # all other reasons that target the resource - self._handleSubscriptionNotification(sub, - reason, - resource, - modifiedAttributes = modifiedAttributes, - asynchronous = self.asyncSubscriptionNotifications) - self.countNotificationEvents(ri) + # all other reasons that target the resource + case _: + self._handleSubscriptionNotification(sub, + reason, + resource, + modifiedAttributes = modifiedAttributes, + asynchronous = self.asyncSubscriptionNotifications) + self.countNotificationEvents(ri) def checkPerformBlockingUpdate(self, resource:Resource, @@ -596,7 +613,7 @@ def _crsCheckForNotification(self, data:list[str], L.isDebug and L.logDebug(f'Checking : {crsRi} window properties: unique notification count: {len(data)}, max expected count: {subCount}, eem: {eem}') # Test for conditions - if (eem == EventEvaluationMode.ALL_EVENTS_PRESENT and len(data) == subCount) or \ + if ((eem is None or eem == EventEvaluationMode.ALL_EVENTS_PRESENT) and len(data) == subCount) or \ (eem == EventEvaluationMode.ALL_OR_SOME_EVENTS_PRESENT and 1 <= len(data) <= subCount) or \ (eem == EventEvaluationMode.SOME_EVENTS_MISSING and 1 <= len(data) < subCount) or \ (eem == EventEvaluationMode.ALL_OR_SOME_EVENTS_MISSING and 0 <= len(data) < subCount) or \ @@ -604,11 +621,23 @@ def _crsCheckForNotification(self, data:list[str], L.isDebug and L.logDebug(f'Received sufficient notifications - sending notification') + # Check the crossResourceSubscription's schedule, if there is one + if (_sc := CSE.storage.searchScheduleForTarget(crsRi)): + _ts = utcDatetime() + + # Check whether the current time matches any schedule + for s in _sc: + if cronMatchesTimestamp(s, _ts): + break + else: + # No schedule matches the current time, so clear the data and just return + L.isDebug and L.logDebug(f'No matching schedule found for : {crsRi}') + return + try: resource = CSE.dispatcher.retrieveResource(crsRi) except ResponseException as e: L.logWarn(f'Cannot retrieve resource: {crsRi}: {e.dbg}') # Not much we can do here - data.clear() return crs = cast(CRS, resource) @@ -640,7 +669,6 @@ def _crsCheckForNotification(self, data:list[str], else: L.isDebug and L.logDebug(f'No notification sent') - data.clear() # Time Window Monitor : Periodic @@ -679,11 +707,13 @@ def stopCRSPeriodicWindow(self, crsRi:str) -> None: def _crsPeriodicWindowMonitor(self, _data:list[str], + _worker:BackgroundWorker, crsRi:str, expectedCount:int, eem:EventEvaluationMode = 
EventEvaluationMode.ALL_EVENTS_PRESENT) -> bool: L.isDebug and L.logDebug(f'Checking periodic window for : {crsRi}') self._crsCheckForNotification(_data, crsRi, expectedCount, eem) + _worker.data = [] return True @@ -721,12 +751,15 @@ def stopCRSSlidingWindow(self, crsRi:str) -> None: BackgroundWorkerPool.stopWorkers(self._getSlidingWorkerName(crsRi)) - def _crsSlidingWindowMonitor(self, _data:Any, + def _crsSlidingWindowMonitor(self, _data:Any, + _worker:BackgroundWorker, crsRi:str, subCount:int, eem:EventEvaluationMode = EventEvaluationMode.ALL_EVENTS_PRESENT) -> bool: L.isDebug and L.logDebug(f'Checking sliding window for : {crsRi}') self._crsCheckForNotification(_data, crsRi, subCount, eem) + _worker.data = [] + # _data.clear() return True @@ -737,17 +770,19 @@ def receivedCrossResourceSubscriptionNotification(self, sur:str, crs:Resource) - crsTwt = crs.twt crsTws = crs.tws L.isDebug and L.logDebug(f'Received notification for : {crsRi}, twt: {crsTwt}, tws: {crsTws}') - if crsTwt == TimeWindowType.SLIDINGWINDOW: - if (workers := BackgroundWorkerPool.findWorkers(self._getSlidingWorkerName(crsRi))): - L.isDebug and L.logDebug(f'Adding notification to worker: {workers[0].name}') - if sur not in workers[0].data: - workers[0].data.append(sur) - else: - workers = [ self.startCRSSlidingWindow(crsRi, crsTws, sur, crs._countSubscriptions(), crs.eem) ] # sur is added automatically when creating actor - elif crsTwt == TimeWindowType.PERIODICWINDOW: - if (workers := BackgroundWorkerPool.findWorkers(self._getPeriodicWorkerName(crsRi))): - if sur not in workers[0].data: - workers[0].data.append(sur) + match crsTwt: + case TimeWindowType.SLIDINGWINDOW: + if (workers := BackgroundWorkerPool.findWorkers(self._getSlidingWorkerName(crsRi))): + L.isDebug and L.logDebug(f'Adding notification to worker: {workers[0].name}') + if sur not in workers[0].data: + workers[0].data.append(sur) + else: + workers = [ self.startCRSSlidingWindow(crsRi, crsTws, sur, crs._countSubscriptions(), crs.eem) ] # sur is added automatically when creating actor + + case TimeWindowType.PERIODICWINDOW: + if (workers := BackgroundWorkerPool.findWorkers(self._getPeriodicWorkerName(crsRi))): + if sur not in workers[0].data: + workers[0].data.append(sur) # No else: Periodic is running or not @@ -811,7 +846,7 @@ def _sender(nu: str, originator:str, content:JSON) -> bool: # Notification Statistics # - def validateAndConstructNotificationStatsInfo(self, sub:SUB|CRS) -> None: + def validateAndConstructNotificationStatsInfo(self, sub:SUB|CRS, add:Optional[bool] = True) -> None: """ Update and fill the *notificationStatsInfo* attribute of a \ or \ resource. This method adds, if necessary, the necessarry stat info structures for each notification @@ -822,8 +857,13 @@ def validateAndConstructNotificationStatsInfo(self, sub:SUB|CRS) -> None: Args: sub: The \ or \ resource for whoich to validate the attribute. + add: If True, add the *notificationStatsInfo* attribute if not present. """ + # Optionally add the attribute + if add: + sub.setAttribute('nsi', [], overwrite = False) + if (nsi := sub.nsi) is None: # nsi attribute must be at least an empty list return nus = sub.nu @@ -861,7 +901,7 @@ def countSentReceivedNotification(self, sub:SUB|CRS, isResponse: Indicates whether a sent notification or a received response should be counted for. count: Number of notifications to count. 
""" - if not sub or not sub.nse: # Don't count if disabled + if not sub or not sub.nse: # Don't count if not present or disabled return L.isDebug and L.logDebug(f'Incrementing notification stats for: {sub.ri} ({"response" if isResponse else "request"})') @@ -872,6 +912,11 @@ def countSentReceivedNotification(self, sub:SUB|CRS, # We have to lock this to prevent race conditions in some cases with CRS handling with self.lockNotificationEventStats: sub.dbReloadDict() # get a fresh copy of the subscription + + # Add nsi if not present. This happens when the first notification is sent after enabling the recording + if sub.nsi is None: + self.validateAndConstructNotificationStatsInfo(sub, True) # nsi is filled here again + for each in sub.nsi: if each['tg'] == target: each[activeField] += count @@ -895,7 +940,7 @@ def countNotificationEvents(self, ri:str, # TODO check resource type? except ResponseException as e: return - if not sub.nse: # Don't count if disabled + if not sub.nse: # Don't count if not present or disabled return L.isDebug and L.logDebug(f'Incrementing notification event stat for: {sub.ri}') @@ -904,6 +949,11 @@ def countNotificationEvents(self, ri:str, # We have to lock this to prevent race conditions in some cases with CRS handling with self.lockNotificationEventStats: sub.dbReloadDict() # get a fresh copy of the subscription + + # Add nsi if not present. This happens when the first notification is sent after enabling the recording + if sub.nsi is None: + self.validateAndConstructNotificationStatsInfo(sub, True) # nsi is filled here again + for each in sub.nsi: each['noec'] += 1 sub.dbUpdate(True) @@ -930,12 +980,13 @@ def updateOfNSEAttribute(self, sub:CRS|SUB, newNse:bool) -> None: if newNse == False: pass # Stop collecting, but keep notificationStatsInfo else: # Both are True - sub.setAttribute('nsi', []) - self.validateAndConstructNotificationStatsInfo(sub) # nsi is filled here again + # Remove the nsi + sub.delAttribute('nsi') + # After SDS-2022-184R01: nsi is not added yet, but when the first statistics are collected. See countNotificationEvents() else: # self.nse == False if newNse == True: - sub.setAttribute('nsi', []) - self.validateAndConstructNotificationStatsInfo(sub) # nsi is filled here again + sub.delAttribute('nsi') + # After SDS-2022-184R01: nsi is not added yet, but when the first statistics are collected. See countNotificationEvents() else: # nse is removed (present in resource, but None, and neither True or False) sub.delAttribute('nsi') @@ -977,7 +1028,7 @@ def _verifyNusInSubscription(self, subscription:SUB|CRS, raise SUBSCRIPTION_VERIFICATION_INITIATION_FAILED(f'Verification request failed for: {nu}') # Add/Update NotificationStatsInfo structure - self.validateAndConstructNotificationStatsInfo(subscription) + self.validateAndConstructNotificationStatsInfo(subscription, False) # DON'T add nsi here if not present ######################################################################### @@ -986,7 +1037,7 @@ def _verifyNusInSubscription(self, subscription:SUB|CRS, def sendVerificationRequest(self, uri:Union[str, list[str]], ri:str, originator:Optional[str] = None) -> bool: - """" Define the callback function for verification notifications and send + """ Define the callback function for verification notifications and send the notification. 
Args: diff --git a/acme/services/Onboarding.py b/acme/services/Onboarding.py index f060d6a7..67baaadf 100644 --- a/acme/services/Onboarding.py +++ b/acme/services/Onboarding.py @@ -106,6 +106,8 @@ def basicConfig() -> None: value = 'Regular'), Choice(name = 'Headless - Like "regular", plus disable most screen output, and the console and text UIs', value = 'Headless'), + Choice(name = 'WSGI - Like "regular", but enable a WSGI server instead of the built-in HTTP server', + value = 'WSGI'), ], default = 'Development', transformer = lambda result: result.split()[0], @@ -221,7 +223,7 @@ def registrarConfig() -> InquirerPySessionResult: amark = '✓', invalid_message = 'Invalid IPv4 or IPv6 address or hostname.', ).execute(), - 'httpPort': inquirer.number( + 'registrarCsePort': inquirer.number( message = 'The Registrar CSE\' host http port:', default = _iniValues[cseType]['registrarCsePort'], long_instruction = 'The TCP port of the remote (Registrar) CSE.', @@ -336,6 +338,12 @@ def csePolicies() -> InquirerPySessionResult: allowedCSROriginators=id-in,id-mn,id-asn """ + cnfRegular = \ +""" + +""" + + cnfDevelopment = \ """ [textui] @@ -365,20 +373,29 @@ def csePolicies() -> InquirerPySessionResult: """ [console] headless=True +""" + + cnfWSGI = \ +""" +[http.wsgi] +enable=True """ # Construct the configuration jcnf = '[basic.config]\n' + '\n'.join(cnf) + cnfExtra # add more mode-specific configurations - if cseEnvironment in ('Development'): - jcnf += cnfDevelopment - - if cseEnvironment in ('Introduction'): - jcnf += cnfIntroduction - - if cseEnvironment in ('Headless'): - jcnf += cnfHeadless + match cseEnvironment: + case 'Regular': + jcnf += cnfRegular + case 'Development': + jcnf += cnfDevelopment + case 'Introduction': + jcnf += cnfIntroduction + case 'Headless': + jcnf += cnfHeadless + case 'WSGI': + jcnf += cnfWSGI # Show configuration and confirm write _print('\n[b]Save configuration\n') diff --git a/acme/services/RegistrationManager.py b/acme/services/RegistrationManager.py index e092cead..647d7c40 100644 --- a/acme/services/RegistrationManager.py +++ b/acme/services/RegistrationManager.py @@ -8,11 +8,9 @@ # from __future__ import annotations -from typing import List, cast, Any, Optional +from typing import Any, Optional -from copy import deepcopy - -from ..etc.Types import Permission, ResourceTypes, JSON, CSEType +from ..etc.Types import ResourceTypes, JSON, CSEType from ..etc.ResponseStatusCodes import APP_RULE_VALIDATION_FAILED, ORIGINATOR_HAS_ALREADY_REGISTERED, INVALID_CHILD_RESOURCE_TYPE from ..etc.ResponseStatusCodes import BAD_REQUEST, OPERATION_NOT_ALLOWED, CONFLICT, ResponseException from ..etc.Utils import uniqueAEI, getIdFromOriginator, uniqueRN @@ -88,11 +86,11 @@ def configUpdate(self, name:str, value:Any = None) -> None: """ Handle configuration updates. 
""" - if key not in [ 'cse.checkExpirationsInterval', + if key not in ( 'cse.checkExpirationsInterval', 'cse.registration.allowedCSROriginators', 'cse.registration.allowedAEOriginators', 'cse.enableResourceExpiration', - 'resource.acp.selfPermission']: + 'resource.acp.selfPermission'): return self._assignConfig() self.restartExpirationMonitor() @@ -116,31 +114,27 @@ def checkResourceCreation(self, resource:Resource, originator:str, parentResource:Optional[Resource] = None) -> str: # Some Resources are not allowed to be created in a request, return immediately - ty = resource.ty - - if ty == ResourceTypes.AE: - originator = self.handleAERegistration(resource, originator, parentResource) - - elif ty == ResourceTypes.REQ: - if not self.handleREQRegistration(resource, originator): - raise BAD_REQUEST('cannot register REQ') - - elif ty == ResourceTypes.CSR: - if CSE.cseType == CSEType.ASN: - raise OPERATION_NOT_ALLOWED('cannot register to ASN CSE') - try: - self.handleCSRRegistration(resource, originator) - except ResponseException as e: - e.dbg = f'cannot register CSR: {e.dbg}' - raise e - - elif ty == ResourceTypes.CSEBaseAnnc: - try: - self.handleCSEBaseAnncRegistration(resource, originator) - except ResponseException as e: - e.dbg = f'cannot register CSEBaseAnnc: {e.dbg}' - raise e - # fall-through + + match resource.ty: + case ResourceTypes.AE: + originator = self.handleAERegistration(resource, originator, parentResource) + case ResourceTypes.CSR: + if CSE.cseType == CSEType.ASN: + raise OPERATION_NOT_ALLOWED('cannot register to ASN CSE') + try: + self.handleCSRRegistration(resource, originator) + except ResponseException as e: + e.dbg = f'cannot register CSR: {e.dbg}' + raise e + case ResourceTypes.REQ: + if not self.handleREQRegistration(resource, originator): + raise BAD_REQUEST('cannot register REQ') + case ResourceTypes.CSEBaseAnnc: + try: + self.handleCSEBaseAnncRegistration(resource, originator) + except ResponseException as e: + e.dbg = f'cannot register CSEBaseAnnc: {e.dbg}' + raise e # Test and set creator attribute. self.handleCreator(resource, originator) @@ -155,13 +149,13 @@ def postResourceCreation(self, resource:Resource) -> None: Args: resource: Resource that was created. 
""" - ty = resource.ty - if ty == ResourceTypes.AE: - # Send event - self._eventAEHasRegistered(resource) - elif ty == ResourceTypes.CSR: - # send event - self._eventRegistreeCSEHasRegistered(resource) + match resource.ty: + case ResourceTypes.AE: + # Send event + self._eventAEHasRegistered(resource) + case ResourceTypes.CSR: + # send event + self._eventRegistreeCSEHasRegistered(resource) def handleCreator(self, resource:Resource, originator:str) -> None: @@ -184,19 +178,16 @@ def checkResourceUpdate(self, resource:Resource, updateDict:JSON) -> None: def checkResourceDeletion(self, resource:Resource) -> None: - ty = resource.ty - if ty == ResourceTypes.AE: - if not self.handleAEDeRegistration(resource): - raise BAD_REQUEST('cannot deregister AE') - - elif ty == ResourceTypes.REQ: - if not self.handleREQDeRegistration(resource): - raise BAD_REQUEST('cannot deregister REQ') - - elif ty == ResourceTypes.CSR: - if not self.handleRegistreeCSRDeRegistration(resource): - raise BAD_REQUEST('cannot deregister CSR') - # fall-through + match resource.ty: + case ResourceTypes.AE: + if not self.handleAEDeRegistration(resource): + raise BAD_REQUEST('cannot deregister AE') + case ResourceTypes.REQ: + if not self.handleREQDeRegistration(resource): + raise BAD_REQUEST('cannot deregister REQ') + case ResourceTypes.CSR: + if not self.handleRegistreeCSRDeRegistration(resource): + raise BAD_REQUEST('cannot deregister CSR') def postResourceDeletion(self, resource:Resource) -> None: @@ -205,13 +196,13 @@ def postResourceDeletion(self, resource:Resource) -> None: Args: resource: Resource that was created. """ - ty = resource.ty - if ty == ResourceTypes.AE: - # Send event - self._eventAEHasDeregistered(resource) - elif ty == ResourceTypes.CSR: - # send event - self._eventRegistreeCSEHasDeregistered(resource) + match resource.ty: + case ResourceTypes.AE: + # Send event + self._eventAEHasDeregistered(resource) + case ResourceTypes.CSR: + # send event + self._eventRegistreeCSEHasDeregistered(resource) ######################################################################### @@ -235,14 +226,13 @@ def handleAERegistration(self, ae:Resource, originator:str, parentResource:Resou raise APP_RULE_VALIDATION_FAILED(L.logDebug('Originator not allowed')) # Assign originator for the AE - if originator == 'C': - originator = uniqueAEI('C') - elif originator == 'S': - originator = uniqueAEI('S') - elif originator is not None: # Allow empty originators - originator = getIdFromOriginator(originator) - # elif originator is None or len(originator) == 0: - # originator = uniqueAEI('S') + match originator: + case 'C': + originator = uniqueAEI('C') + case 'S': + originator = uniqueAEI('S') + case x if x is not None: + originator = getIdFromOriginator(originator) # Check whether an originator has already registered with the same AE-ID if self.hasRegisteredAE(originator): diff --git a/acme/services/RemoteCSEManager.py b/acme/services/RemoteCSEManager.py index 35c74cd7..d235c89b 100644 --- a/acme/services/RemoteCSEManager.py +++ b/acme/services/RemoteCSEManager.py @@ -527,7 +527,7 @@ def _retrieveLocalCSRResources(self, includeRegistrarCSR:Optional[bool] = False, A list of found CSR resources. 
""" registreeCsrList = [] - for eachCSR in CSE.dispatcher.directChildResources(pi = CSE.cseRi, ty = ResourceTypes.CSR): + for eachCSR in CSE.dispatcher.retrieveDirectChildResources(pi = CSE.cseRi, ty = ResourceTypes.CSR): if eachCSR.csi == self.registrarCSI: # type: ignore[name-defined] if includeRegistrarCSR: registreeCsrList.append(eachCSR) diff --git a/acme/services/RequestManager.py b/acme/services/RequestManager.py index a64f0855..57217d28 100644 --- a/acme/services/RequestManager.py +++ b/acme/services/RequestManager.py @@ -19,9 +19,9 @@ from ..etc.Types import ResponseStatusCode, ResultContentType, RequestStatus, CSERequest, RequestHandler from ..etc.Types import ResourceTypes, ResponseStatusCode, ResponseType, Result, EventCategory from ..etc.Types import CSERequest, ContentSerializationType, RequestResponseList, RequestResponse -from ..etc.ResponseStatusCodes import ResponseException, exceptionFromRSC +from ..etc.ResponseStatusCodes import ResponseException from ..etc.ResponseStatusCodes import BAD_REQUEST, NOT_FOUND, REQUEST_TIMEOUT, RELEASE_VERSION_NOT_SUPPORTED -from ..etc.ResponseStatusCodes import UNSUPPORTED_MEDIA_TYPE, OPERATION_NOT_ALLOWED +from ..etc.ResponseStatusCodes import UNSUPPORTED_MEDIA_TYPE, OPERATION_NOT_ALLOWED, REQUEST_TIMEOUT from ..etc.DateUtils import getResourceDate, fromAbsRelTimestamp, utcTime, waitFor, toISO8601Date, fromDuration from ..etc.RequestUtils import requestFromResult, determineSerialization, deserializeData from ..etc.Utils import isCSERelative, toSPRelative, isValidCSI, isValidAEI, uniqueRI, isURL, isAbsolute, isSPRelative @@ -277,7 +277,7 @@ def processRequest(self, request:CSERequest, originator:str, id:str) -> Result: Request result """ return self.requestHandlers[request.op].dispatcherRequest(request, originator, id) - + def handleReceivedNotifyRequest(self, id:str, request:CSERequest, originator:str) -> Result: """ Handle a NOTIFY request to resource. 
@@ -306,17 +306,16 @@ def handleReceivedNotifyRequest(self, id:str, request:CSERequest, originator:str def retrieveRequest(self, request:CSERequest) -> Result: L.isDebug and L.logDebug(f'RETRIEVE ID: {request.id if request.id else request.srn}, originator: {request.originator}') - if request.rt == ResponseType.blockingRequest: - return CSE.dispatcher.processRetrieveRequest(request, request.originator) - - elif request.rt in [ ResponseType.nonBlockingRequestSynch, ResponseType.nonBlockingRequestAsynch ]: - return self._handleNonBlockingRequest(request) - - elif request.rt == ResponseType.flexBlocking: - if self.flexBlockingBlocking: # flexBlocking as blocking - return CSE.dispatcher.processRetrieveRequest(request, request .originator) - else: # flexBlocking as non-blocking + match request.rt: + case ResponseType.blockingRequest: + return CSE.dispatcher.processRetrieveRequest(request, request.originator) + case ResponseType.nonBlockingRequestSynch | ResponseType.nonBlockingRequestAsynch: return self._handleNonBlockingRequest(request) + case ResponseType.flexBlocking: + if self.flexBlockingBlocking: # flexBlocking as blocking + return CSE.dispatcher.processRetrieveRequest(request, request .originator) + else: # flexBlocking as non-blocking + return self._handleNonBlockingRequest(request) raise BAD_REQUEST(f'Unknown or unsupported ResponseType: {request.rt}') @@ -333,17 +332,16 @@ def createRequest(self, request:CSERequest) -> Result: if request.ty == None: raise BAD_REQUEST('missing or wrong resourceType in request') - if request.rt == ResponseType.blockingRequest: - return CSE.dispatcher.processCreateRequest(request, request.originator) - - elif request.rt in [ ResponseType.nonBlockingRequestSynch, ResponseType.nonBlockingRequestAsynch ]: - return self._handleNonBlockingRequest(request) - - elif request.rt == ResponseType.flexBlocking: - if self.flexBlockingBlocking: # flexBlocking as blocking + match request.rt: + case ResponseType.blockingRequest: return CSE.dispatcher.processCreateRequest(request, request.originator) - else: # flexBlocking as non-blocking + case ResponseType.nonBlockingRequestSynch | ResponseType.nonBlockingRequestAsynch: return self._handleNonBlockingRequest(request) + case ResponseType.flexBlocking: + if self.flexBlockingBlocking: # flexBlocking as blocking + return CSE.dispatcher.processCreateRequest(request, request.originator) + else: # flexBlocking as non-blocking + return self._handleNonBlockingRequest(request) raise BAD_REQUEST(f'Unknown or unsupported ResponseType: {request.rt}') @@ -361,17 +359,16 @@ def updateRequest(self, request:CSERequest) -> Result: raise OPERATION_NOT_ALLOWED('operation not allowed for CSEBase') # Check contentType and resourceType - if request.rt == ResponseType.blockingRequest: - return CSE.dispatcher.processUpdateRequest(request, request.originator) - - elif request.rt in [ ResponseType.nonBlockingRequestSynch, ResponseType.nonBlockingRequestAsynch ]: - return self._handleNonBlockingRequest(request) - - elif request.rt == ResponseType.flexBlocking: - if self.flexBlockingBlocking: # flexBlocking as blocking + match request.rt: + case ResponseType.blockingRequest: return CSE.dispatcher.processUpdateRequest(request, request.originator) - else: # flexBlocking as non-blocking + case ResponseType.nonBlockingRequestSynch | ResponseType.nonBlockingRequestAsynch: return self._handleNonBlockingRequest(request) + case ResponseType.flexBlocking: + if self.flexBlockingBlocking: # flexBlocking as blocking + return 
CSE.dispatcher.processUpdateRequest(request, request.originator) + else: # flexBlocking as non-blocking + return self._handleNonBlockingRequest(request) raise BAD_REQUEST(f'Unknown or unsupported ResponseType: {request.rt}') @@ -387,20 +384,19 @@ def deleteRequest(self, request:CSERequest,) -> Result: # Don't delete the CSEBase if request.id in [ CSE.cseRi, CSE.cseRi, CSE.cseRn ]: - raise OPERATION_NOT_ALLOWED(dbg = 'DELETE operation is not allowed for CSEBase') + raise OPERATION_NOT_ALLOWED('DELETE operation is not allowed for CSEBase') - if request.rt == ResponseType.blockingRequest or (request.rt == ResponseType.flexBlocking and self.flexBlockingBlocking): - return CSE.dispatcher.processDeleteRequest(request, request.originator) - - elif request.rt in [ ResponseType.nonBlockingRequestSynch, ResponseType.nonBlockingRequestAsynch ]: - return self._handleNonBlockingRequest(request) - - elif request.rt == ResponseType.flexBlocking: - if self.flexBlockingBlocking: # flexBlocking as blocking + match request.rt: + case ResponseType.blockingRequest: return CSE.dispatcher.processDeleteRequest(request, request.originator) - else: # flexBlocking as non-blocking + case ResponseType.nonBlockingRequestSynch | ResponseType.nonBlockingRequestAsynch: return self._handleNonBlockingRequest(request) - + case ResponseType.flexBlocking: # flexBlocking as non-blocking + if self.flexBlockingBlocking: # flexBlocking as blocking + return CSE.dispatcher.processDeleteRequest(request, request.originator) + else: # flexBlocking as non-blocking + return self._handleNonBlockingRequest(request) + raise BAD_REQUEST(f'Unknown or unsupported ResponseType: {request.rt}') @@ -412,18 +408,17 @@ def deleteRequest(self, request:CSERequest,) -> Result: def notifyRequest(self, request:CSERequest) -> Result: L.isDebug and L.logDebug(f'NOTIFY ID: {request.id if request.id else request.srn}, originator: {request.originator}') - # Check contentType and resourceType - if request.rt == ResponseType.blockingRequest: - return CSE.dispatcher.processNotifyRequest(request, request.originator) - - elif request.rt in [ ResponseType.nonBlockingRequestSynch, ResponseType.nonBlockingRequestAsynch ]: - return self._handleNonBlockingRequest(request) - elif request.rt == ResponseType.flexBlocking: - if self.flexBlockingBlocking: # flexBlocking as blocking + match request.rt: + case ResponseType.blockingRequest: return CSE.dispatcher.processNotifyRequest(request, request.originator) - else: # flexBlocking as non-blocking + case ResponseType.nonBlockingRequestSynch | ResponseType.nonBlockingRequestAsynch: return self._handleNonBlockingRequest(request) + case ResponseType.flexBlocking: + if self.flexBlockingBlocking: # flexBlocking as blocking + return CSE.dispatcher.processNotifyRequest(request, request.originator) + else: # flexBlocking as non-blocking + return self._handleNonBlockingRequest(request) raise BAD_REQUEST(f'Unknown or unsupported ResponseType: {request.rt}') @@ -537,16 +532,37 @@ def _runNonBlockingRequestAsync(self, request:CSERequest, reqRi:str) -> bool: def _executeOperation(self, request:CSERequest, reqRi:str) -> REQ: """ Execute a request operation and fill the respective request resource accordingly. + + Args: + request: The request to execute. + reqRi: The resource id. + + Return: + The resource. 
""" # Execute the actual operation in the dispatcher pc = None try: - operationResult = self.requestHandlers[request.op].dispatcherRequest(request, request.originator) + try: + operationResult = self.requestHandlers[request.op].dispatcherRequest(request, request.originator) + except REQUEST_TIMEOUT: + pass + except ResponseException as e: + raise e + # attributes set below in the request rs = RequestStatus.COMPLETED rsc = operationResult.rsc if operationResult.resource: - pc = operationResult.resource.asDict() + if isinstance(operationResult.resource, Resource): + pc = operationResult.resource.asDict() + else: + # Handle and remove the internal incomplete indicator + if operationResult.resource.get('acme:incomplete'): + rs = RequestStatus.PARTIALLY_COMPLETED + del operationResult.resource['acme:incomplete'] + pc = operationResult.resource + except ResponseException as e: # attributes set below in the request @@ -642,28 +658,6 @@ def handleTransitNotifyRequest(self, request:CSERequest) -> Result: return self.handleSendRequest(request)[0].result # there should be at least one result - # def _getForwardURL(self, path:str) -> Optional[str]: # FIXME DELETE ME This may be removed due to the new request handling - # """ Get the new target URL when forwarding. - # """ - # # L.isDebug and L.logDebug(path) - # csr, pe = CSE.remote.getCSRFromPath(path) - # # L.isDebug and L.logDebug(csr) - # if csr and (poas := csr.poa) and len(poas) > 0: - # return f'{poas[0]}//{"/".join(pe[1:])}' # TODO check all available poas. - # return None - - - # def _constructForwardURL(self, request:CSERequest) -> str: - # """ Construct the target URL for the forward request. Add the original - # arguments. The URL is returned in Result.data . - # """ - # if not (url := self._getForwardURL(request.id)): - # raise NOT_FOUND(f'forward URL not found for id: {request.id}') - # if request.originalHttpArgs is not None and len(request.originalHttpArgs) > 0: # pass on other arguments, for discovery. Only http - # url += '?' + urllib.parse.urlencode(request.originalHttpArgs) - # return url - - def _originatorToSPRelative(self, request:CSERequest) -> None: """ Convert *from* to SP-relative format in the request. The *from* is converted in *request.originator* and *request.originalRequest*, but NOT in @@ -710,6 +704,10 @@ def queuePollingRequest(self, request:CSERequest, reqType:RequestType=RequestTyp L.isDebug and L.logDebug(f'Request must have a "requestExpirationTimestamp". Adding a default one: {ret}') request.rqet = ret request._rqetUTCts = fromAbsRelTimestamp(ret) + + # Why don't we handle the Result Expiration Timestamo request parameter here? Because it must be + # greater than the Request Expiration Timestamp, so the reqeust expires at that timestamp first anyway. + if not request.rqi: L.logErr(f'Request must have a "requestIdentifier". Ignored. {request}', showStackTrace=False) return @@ -853,7 +851,6 @@ def queueRequestForPCH( self, # If the request has no id, then use the to field if not request.id: request.id = request.to - L.logErr(f'Internal error. 
{request}') # Always mark the request as a REQUEST request.requestType = reqType @@ -1169,7 +1166,7 @@ def gget(dct:dict, raise BAD_REQUEST(L.logDebug('error in provided Request Expiration Timestamp'), data = cseRequest) else: if _ts < utcTime(): - raise REQUEST_TIMEOUT(L.logDebug(f'request timeout: rqet {_ts} < {utcTime()}'), data = cseRequest) + raise REQUEST_TIMEOUT(L.logDebug(f'request timeout reached: rqet {_ts} < {utcTime()}'), data = cseRequest) else: cseRequest._rqetUTCts = _ts # Re-assign "real" ISO8601 timestamp cseRequest.rqet = toISO8601Date(_ts) @@ -1180,9 +1177,14 @@ def gget(dct:dict, raise BAD_REQUEST(L.logDebug('error in provided Result Expiration Timestamp'), data = cseRequest) else: if _ts < utcTime(): - raise REQUEST_TIMEOUT(L.logDebug('result timeout'), data = cseRequest) + raise REQUEST_TIMEOUT(L.logDebug(f'result timeout reached: rset {_ts} < {utcTime()}'), data = cseRequest) else: - cseRequest.rset = toISO8601Date(_ts) # Re-assign "real" ISO8601 timestamp + cseRequest._rsetUTCts = _ts # Re-assign "real" ISO8601 timestamp + # Re-assign "real" ISO8601 timestamp + try: + cseRequest.rset = int(rset) # type: ignore [assignment] + except ValueError: + cseRequest.rset = toISO8601Date(_ts) # OET - operationExecutionTime if (oet := gget(cseRequest.originalRequest, 'oet', greedy=False)): @@ -1232,10 +1234,11 @@ def gget(dct:dict, # assign defaults when not provided if cseRequest.fc.fu != FilterUsage.discoveryCriteria: # Different defaults for each operation - if cseRequest.op in [ Operation.RETRIEVE, Operation.CREATE, Operation.UPDATE ]: - rcn = ResultContentType.attributes - elif cseRequest.op == Operation.DELETE: - rcn = ResultContentType.nothing + match cseRequest.op: + case Operation.RETRIEVE | Operation.CREATE | Operation.UPDATE: + rcn = ResultContentType.attributes + case Operation.DELETE: + rcn = ResultContentType.nothing else: # discovery-result-references as default for Discovery operation rcn = ResultContentType.discoveryResultReferences @@ -1277,18 +1280,34 @@ def gget(dct:dict, # Discovery and FilterCriteria # if fcAttrs: # only when there is a filterCriteria, copy the available attribute to the FilterCriteria structure - for h in [ 'lim', 'lvl', 'ofst', 'arp', + for h in ( 'lim', 'lvl', 'ofst', 'arp', 'crb', 'cra', 'ms', 'us', 'sts', 'stb', 'exb', 'exa', 'lbq', 'sza', 'szb', 'catr', 'patr', 'smf', - 'aq']: + 'aq'): if (v := gget(fcAttrs, h)) is not None: # may be int cseRequest.fc.set(h, v) - for h in [ 'lbl', 'cty' ]: # different handling of list attributes + for h in ( 'lbl', 'cty' ): # different handling of list attributes if (v := gget(fcAttrs, h, attributeType = BasicType.list, checkSubType = False)) is not None: cseRequest.fc.set(h, v) - for h in [ 'ty' ]: # different handling of list attributes that are normally non-lists + for h in ( 'ty', ): # different handling of list attributes that are normally non-lists if (v := gget(fcAttrs, h, attributeType = BasicType.list, checkSubType = True)) is not None: # may be int cseRequest.fc.set(h, v) + + # Handling of geo-query attributes + match len([a for a in ('gmty', 'geom', 'gsf') if a in fcAttrs]): + case 0: + pass + case 1 | 2: + raise BAD_REQUEST(L.logDebug('gmty, geom and gsf must be specified together'), data = cseRequest) + case 3: + if (v := gget(fcAttrs, 'gmty')) is not None: + cseRequest.fc.gmty = v + geom = fcAttrs.get('geom') + if (v := gget(fcAttrs, 'geom')) is not None: + cseRequest.fc.geom = geom + cseRequest.fc._geom = v + if (v := gget(fcAttrs, 'gsf')) is not None: + cseRequest.fc.gsf = v # Copy 
all remaining attributes as filter criteria! @@ -1508,7 +1527,7 @@ def getTargetReleaseVersion(srv:list) -> str: pollingChannelResources = [] if targetResource.rr == False: L.isDebug and L.logDebug(f'Target: {uri} is not requestReachable. Trying .') - if not len(pollingChannelResources := CSE.dispatcher.directChildResources(targetResource.ri, ResourceTypes.PCH)): + if not len(pollingChannelResources := CSE.dispatcher.retrieveDirectChildResources(targetResource.ri, ResourceTypes.PCH)): L.isWarn and L.logWarn(f'Target: {uri} is not requestReachable and does not have a .') return [] # Take the first resource and return it. There should hopefully only be one, but we don't check this here @@ -1553,14 +1572,15 @@ def recordRequest(self, request:CSERequest, result:Result) -> None: return # Construct and store request & response - if result.resource and isinstance(result.resource, Resource): - pc = result.resource.asDict() - elif isinstance(result.resource, dict): - pc = result.resource - elif result.data: - pc = result.data # type:ignore - else: - pc = None + match _resource := result.resource: + case Resource(): + pc = _resource.asDict() + case dict(): + pc = _resource + case x if result.data: + pc = result.data # type:ignore + case _: + pc = None # Determine the structure address if not (srn := request.srn): @@ -1576,6 +1596,16 @@ def recordRequest(self, request:CSERequest, result:Result) -> None: else: rid = 'unknown' + # Map the response + response = { 'rsc': result.rsc, + 'pc': pc, + 'dbg': result.dbg, + 'ot': result.request.ot if result.request and result.request.ot else getResourceDate(), + } + if request.rset: + response['rset'] = request.rset + + request.fillOriginalRequest(update = True) CSE.storage.addRequest(request.op, @@ -1585,9 +1615,5 @@ def recordRequest(self, request:CSERequest, result:Result) -> None: request._outgoing, request.ot if request.ot else toISO8601Date(request._ot), # Only convert now to ISO8601 to avoid unnecessary conversions request.originalRequest, - { 'rsc': result.rsc, - 'pc': pc, - 'dbg': result.dbg, - 'ot': result.request.ot if result.request and result.request.ot else getResourceDate(), - }) + response) \ No newline at end of file diff --git a/acme/services/ScriptManager.py b/acme/services/ScriptManager.py index 22423a76..71460bab 100644 --- a/acme/services/ScriptManager.py +++ b/acme/services/ScriptManager.py @@ -11,16 +11,16 @@ from typing import Callable, Dict, Union, Any, Tuple, cast, Optional, List from pathlib import Path -import json, os, fnmatch +import json, os, fnmatch, traceback import requests, webbrowser from decimal import Decimal from rich.text import Text from ..helpers.KeyHandler import FunctionKey -from ..etc.Types import JSON, ACMEIntEnum, CSERequest, Operation, ResourceTypes, Result +from ..etc.Types import JSON, ACMEIntEnum, CSERequest, Operation, ResourceTypes, Result, BasicType, AttributePolicy from ..etc.ResponseStatusCodes import ResponseException -from ..etc.DateUtils import cronMatchesTimestamp, getResourceDate +from ..etc.DateUtils import cronMatchesTimestamp, getResourceDate, utcDatetime from ..etc.Utils import runsInIPython, uniqueRI, isURL, uniqueID, pureResource from .Configuration import Configuration from ..helpers.Interpreter import PContext, PFuncCallable, PUndefinedError, PError, PState, SSymbol, SType, PSymbolCallable @@ -119,9 +119,11 @@ def __init__(self, symbols = { 'clear-console': self.doClearConsole, 'create-resource': self.doCreateResource, + 'cse-attribute-infos': self.doCseAttributeInfos, 'cse-status': 
self.doCseStatus, 'delete-resource': self.doDeleteResource, 'get-config': self.doGetConfiguration, + 'get-loglevel': self.doGetLogLevel, 'get-storage': self.doGetStorage, 'has-config': self.doHasConfiguration, 'has-storage': self.doHasStorage, @@ -145,6 +147,7 @@ def __init__(self, 'set-config': self.doSetConfig, 'set-console-logging': self.doSetLogging, 'schedule-next-script': self.doScheduleNextScript, + 'tui-notify': self.doTuiNotify, 'tui-refresh-resources': self.doTuiRefreshResources, 'tui-visual-bell': self.doTuiVisualBell, 'update-resource': self.doUpdateResource, @@ -238,7 +241,7 @@ def errorMessage(self) -> str: Return: String with the error message. """ - return f'{self.error.error.name} error in {self.scriptFilename} - {self.error.message}' + return f'"{self.error.error.name}" error in {self.scriptFilename} - {self.error.message}' @property @@ -320,6 +323,77 @@ def doCreateResource(self, pcontext:PContext, symbol:SSymbol) -> PContext: return self._handleRequest(cast(ACMEPContext, pcontext), symbol, Operation.CREATE) + def doCseAttributeInfos(self, pcontext:PContext, symbol:SSymbol) -> PContext: + """ Return a list of CSE attribute infos for the given attribute name. + The search is done over the short and long names of the attributes using + a fuzzy search when searching the long names. + + The function has the following arguments: + + - attribute name. This could be a short name or a long name. + + The function returns a quoted list where each entry is another quoted list + with the following symbols: + + - attribute short name + - attribute long name + - attribute type + + Example: + :: + + (cse-attribute-info "acop") -> ( ( "acop" "accessControlOperations" "nonNegInteger" ) ) + + Args: + pcontext: `PContext` object of the running script. + symbol: The symbol to execute. + + Return: + The updated `PContext` object with the operation result. + """ + + def _getType(t:BasicType, policy:AttributePolicy) -> str: # type:ignore [return] + match t: + case BasicType.list | BasicType.listNE if policy.lTypeName != 'enum': + return f'{policy.typeName} of {policy.lTypeName}' + case BasicType.list | BasicType.listNE if policy.lTypeName == 'enum': + return f'{policy.typeName} of {_getType(BasicType.enum, policy)}' + case BasicType.complex: + return policy.typeName + case BasicType.enum: + return f'enum ({policy.etype})' + case _: + return policy.typeName + + + pcontext.assertSymbol(symbol, 2) + + # get attribute name + pcontext, _name = pcontext.valueFromArgument(symbol, 1, SType.tString) + + result = CSE.validator.getAttributePoliciesByName(_name) + resultSymbolList = [] + if result is not None: + for policy in result: + # Determine exact type + _t = _getType(policy.type, policy) + # match policy.type: + # case BasicType.list | BasicType.listNE: + # _t = f'{policy.typeName} of {policy.lTypeName}' + # case BasicType.complex: + # _t = policy.typeName + # case BasicType.enum: + # _t = f'enum ({policy.etype})' + # case _: + # _t = policy.typeName + + resultSymbolList.append(SSymbol(lstQuote = [ SSymbol(string = policy.sname), + SSymbol(string = policy.lname), + SSymbol(string = _t) ])) + + return pcontext.setResult(SSymbol(lstQuote = resultSymbolList)) + + def doCseStatus(self, pcontext:PContext, symbol:SSymbol) -> PContext: """ Retrieve the CSE status. 
@@ -394,11 +468,37 @@ def doGetConfiguration(self, pcontext:PContext, symbol:SSymbol) -> PContext: # config value if (_v := Configuration.get(_key)) is None: - raise PUndefinedError(pcontext.setError(PError.undefined, f'undefined key: {_key}')) + raise PUndefinedError(pcontext.setError(PError.undefined, f'undefined configuration key: {_key}')) return pcontext.setResult(SSymbol(value = _v)) + def doGetLogLevel(self, pcontext:PContext, symbol:SSymbol) -> PContext: + """ Get the log level of the CSE. This will be one of the following strings: + + - "DEBUG" + - "INFO" + - "WARNING" + - "ERROR" + - "OFF" + + + Example: + :: + + (get-loglevel) -> "INFO" + + Args: + pcontext: PContext object of the running script. + symbol: The symbol to execute. + + Return: + The updated `PContext` object with the operation result. + """ + pcontext.assertSymbol(symbol, 1) + return pcontext.setResult(SSymbol(string = str(L.logLevel))) + + def doGetStorage(self, pcontext:PContext, symbol:SSymbol) -> PContext: """ Retrieve a value for *key* from the persistent storage *storage*. @@ -541,7 +641,6 @@ def doHttp(self, pcontext:PContext, symbol:SSymbol) -> PContext: except requests.exceptions.ConnectionError: pcontext.variables['response.status'] = SSymbol() # nil return pcontext.setResult(SSymbol()) - #print(response) # parse response and assign to variables @@ -1111,44 +1210,41 @@ def doSetConfig(self, pcontext:PContext, symbol:SSymbol) -> PContext: if Configuration.has(_key): # could be None, False, 0, empty string etc # Do some conversions first - v = Configuration.get(_key) - if isinstance(v, ACMEIntEnum): - if result.type == SType.tString: - r = Configuration.update(_key, v.__class__.to(cast(str, result.value), insensitive = True)) - else: - raise PInvalidTypeError(pcontext.setError(PError.invalid, 'configuration value must be a string')) - - elif isinstance(v, str): - if result.type == SType.tString: - r = Configuration.update(_key, cast(str, result.value).strip()) - else: - raise PInvalidTypeError(pcontext.setError(PError.invalid, 'configuration value must be a string')) - - # bool must be tested before int! 
- # See https://stackoverflow.com/questions/37888620/comparing-boolean-and-int-using-isinstance/37888668#37888668 - elif isinstance(v, bool): - if result.type == SType.tBool: - r = Configuration.update(_key, result.value) - else: - raise PInvalidTypeError(pcontext.setError(PError.invalidType, f'configuration value must be a boolean')) - - elif isinstance(v, int): - if result.type == SType.tNumber: - r = Configuration.update(_key, int(cast(Decimal, result.value))) - else: - raise PInvalidTypeError(pcontext.setError(PError.invalidType, f'configuration value must be an integer')) - - elif isinstance(v, float): - if result.type == SType.tNumber: - r = Configuration.update(_key, float(cast(Decimal, result.value))) - else: - raise PInvalidTypeError(pcontext.setError(PError.invalidType, f'configuration value must be a float, is: {result.type}')) - - elif isinstance(v, list): - raise PUnsupportedError(pcontext.setError(PError.invalidType, f'unsupported type: {type(v)}')) - else: - raise PUnsupportedError(pcontext.setError(PError.invalidType, f'unsupported type: {type(v)}')) + match (v := Configuration.get(_key)): + case ACMEIntEnum(): + if result.type == SType.tString: + r = Configuration.update(_key, v.__class__.to(cast(str, result.value), insensitive = True)) + else: + raise PInvalidTypeError(pcontext.setError(PError.invalid, 'configuration value must be a string')) + case str(): + if result.type == SType.tString: + r = Configuration.update(_key, cast(str, result.value).strip()) + else: + raise PInvalidTypeError(pcontext.setError(PError.invalid, 'configuration value must be a string')) + # bool must be tested before int! + # See https://stackoverflow.com/questions/37888620/comparing-boolean-and-int-using-isinstance/37888668#37888668 + case bool(): + if result.type == SType.tBool: + r = Configuration.update(_key, result.value) + else: + raise PInvalidTypeError(pcontext.setError(PError.invalidType, f'configuration value must be a boolean')) + + case int(): + if result.type == SType.tNumber: + r = Configuration.update(_key, int(cast(Decimal, result.value))) + else: + raise PInvalidTypeError(pcontext.setError(PError.invalidType, f'configuration value must be an integer')) + + case float(): + if result.type == SType.tNumber: + r = Configuration.update(_key, float(cast(Decimal, result.value))) + else: + raise PInvalidTypeError(pcontext.setError(PError.invalidType, f'configuration value must be a float, is: {result.type}')) + + case _: + raise PUnsupportedError(pcontext.setError(PError.invalidType, f'unsupported type: {type(v)}')) + # Check whether something went wrong while setting the config if r: raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'Error setting configuration: {r}')) @@ -1182,6 +1278,53 @@ def doSetLogging(self, pcontext:PContext, symbol:SSymbol) -> PContext: return pcontext.setResult(SSymbol()) + def doTuiNotify(self, pcontext:PContext, symbol:SSymbol) -> PContext: + """ Show a TUI notification. + + This function is only available in TUI mode. It has the following arguments. + + - message: The message to show. + - title: (Optional) The title of the notification. + - severity: (Optional) The severity of the notification. Can be + one of the following values: *information*, *warning*, *error*. + - timeout: (Optional) The timeout in seconds after which the + notification will disappear. If not specified, the notification + will disappear after 3 seconds. + + The function returns NIL. 
+ + Example: + :: + + (tui-notify "This is a notification") + + Args: + pcontext: `PContext` object of the running script. + symbol: The symbol to execute. + + Return: + The updated `PContext` object. + """ + pcontext.assertSymbol(symbol, minLength = 2, maxLength = 5) + + # Value + pcontext, value = pcontext.valueFromArgument(symbol, 1, SType.tString) + + # Title + pcontext, title = pcontext.valueFromArgument(symbol, 2, SType.tString, optional = True) + + # Severity + pcontext, severity = pcontext.valueFromArgument(symbol, 3, SType.tString, optional = True) + + # Timeout + pcontext, timeout = pcontext.valueFromArgument(symbol, 4, SType.tNumber, optional = True) + + # show the notification + CSE.textUI.scriptShowNotification(value, title, severity, float(timeout) if timeout is not None else None) + + return pcontext.setResult(SSymbol()) + + def doTuiRefreshResources(self, pcontext:PContext, symbol:SSymbol) -> PContext: """ Refresh the TUI resources. This will update the resource Tree and the resource details. @@ -1350,7 +1493,7 @@ def _handleRequest(self, pcontext:PContext, symbol:SSymbol, operation:Operation) if operation == Operation.CREATE: if (ty := ResourceTypes.fromTPE( list(content.keys())[0] )) is None: # first is tpe raise PInvalidArgumentError(pcontext.setError(PError.invalid, 'Cannot determine resource type')) - req['ty'] = ty + req['ty'] = ty.value # Add primitive content when content is available req['pc'] = content @@ -1362,46 +1505,46 @@ def _handleRequest(self, pcontext:PContext, symbol:SSymbol, operation:Operation) try: request = CSE.request.fillAndValidateCSERequest(req) except ResponseException as e: - raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'Invalid resource: {e.dbg}')) + raise PInvalidArgumentError(pcontext.setError(PError.invalid, f'Invalid resource: {e.dbg}', exception = e)) # Send request L.isDebug and L.logDebug(f'Sending request from script: {request.originalRequest} to: {target}') if isURL(target): - if operation == Operation.RETRIEVE: - res = CSE.request.handleSendRequest(CSERequest(op = Operation.RETRIEVE, - ot = getResourceDate(), - to = target, - originator = originator) - )[0].result # there should be at least one result - - elif operation == Operation.DELETE: - res = CSE.request.handleSendRequest(CSERequest(op = Operation.DELETE, - ot = getResourceDate(), - to = target, - originator = originator) - )[0].result # there should be at least one result - elif operation == Operation.CREATE: - res = CSE.request.handleSendRequest(CSERequest(op = Operation.CREATE, - ot = getResourceDate(), - to = target, - originator = originator, - ty = ty, - pc = request.pc) - )[0].result # there should be at least one result - elif operation == Operation.UPDATE: - res = CSE.request.handleSendRequest(CSERequest(op = Operation.UPDATE, - ot = getResourceDate(), - to = target, - originator = originator, - pc = request.pc) - )[0].result # there should be at least one result - elif operation == Operation.NOTIFY: - res = CSE.request.handleSendRequest(CSERequest(op = Operation.NOTIFY, - ot = getResourceDate(), - to = target, - originator = originator, - pc = request.pc) - )[0].result # there should be at least one result + match operation: + case Operation.RETRIEVE: + res = CSE.request.handleSendRequest(CSERequest(op = Operation.RETRIEVE, + ot = getResourceDate(), + to = target, + originator = originator) + )[0].result # there should be at least one result + case Operation.DELETE: + res = CSE.request.handleSendRequest(CSERequest(op = Operation.DELETE, + ot = 
getResourceDate(), + to = target, + originator = originator) + )[0].result # there should be at least one result + case Operation.CREATE: + res = CSE.request.handleSendRequest(CSERequest(op = Operation.CREATE, + ot = getResourceDate(), + to = target, + originator = originator, + ty = ty, + pc = request.pc) + )[0].result # there should be at least one result + case Operation.UPDATE: + res = CSE.request.handleSendRequest(CSERequest(op = Operation.UPDATE, + ot = getResourceDate(), + to = target, + originator = originator, + pc = request.pc) + )[0].result # there should be at least one result + case Operation.NOTIFY: + res = CSE.request.handleSendRequest(CSERequest(op = Operation.NOTIFY, + ot = getResourceDate(), + to = target, + originator = originator, + pc = request.pc) + )[0].result # there should be at least one result else: # Request via CSE-ID, either local, or otherwise a transit request. Let the CSE handle it @@ -1427,6 +1570,7 @@ class ScriptManager(object): scriptDirectories: List of script directories to monitoe. scriptUpdatesMonitor: `BackgroundWorker` worker to monitor script directories. scriptCronWorker: `BackgroundWorker` worker to run cron-enabled scripts. + maxRuntime: Maximum runtime for a script. """ __slots__ = ( @@ -1439,6 +1583,7 @@ class ScriptManager(object): 'scriptDirectories', 'scriptMonitorInterval', 'verbose', + 'maxRuntime' ) """ Slots of class attributes. """ @@ -1496,6 +1641,7 @@ def _assignConfig(self) -> None: self.verbose = Configuration.get('scripting.verbose') self.scriptMonitorInterval = Configuration.get('scripting.fileMonitoringInterval') self.scriptDirectories = Configuration.get('scripting.scriptDirectories') + self.maxRuntime = Configuration.get('scripting.maxRuntime') def configUpdate(self, name:str, @@ -1510,7 +1656,8 @@ def configUpdate(self, name:str, """ if key not in [ 'scripting.verbose', 'scripting.fileMonitoringInterval', - 'scripting.scriptDirectories' + 'scripting.scriptDirectories', + 'scripting.maxRuntime' ]: return @@ -1542,8 +1689,9 @@ def cseStarted(self, name:str) -> None: if self.scriptMonitorInterval > 0.0: self.scriptUpdatesMonitor.start() - # Add a worker to check scheduled script, fixed every minute - self.scriptCronWorker = BackgroundWorkerPool.newWorker(60.0, + # Add a worker to check scheduled script, fixed every second + # TODO resolution + self.scriptCronWorker = BackgroundWorkerPool.newWorker(1, self.cronMonitor, 'scriptCronMonitor').start() @@ -1633,11 +1781,11 @@ def checkScriptUpdates(self) -> bool: del self.scripts[eachName] # Read new scripts - if CSE.importer.resourcePath: # from the init directory - if self.loadScriptsFromDirectory(CSE.importer.resourcePath) == -1: + if CSE.importer.extendedResourcePath: # from the init directory + if self.loadScriptsFromDirectory(CSE.importer.extendedResourcePath) == -1: L.isWarn and L.logWarn('Cannot import new scripts') - if CSE.script.scriptDirectories: # from the extra script directories - if self.loadScriptsFromDirectory(CSE.script.scriptDirectories) == -1: + if self.scriptDirectories: # from the extra script directories + if self.loadScriptsFromDirectory(self.scriptDirectories) == -1: L.isWarn and L.logWarn('Cannot import new scripts') return True @@ -1652,9 +1800,10 @@ def cronMonitor(self) -> bool: Boolean. Usually *True* to continue with monitoring. 
""" #L.isDebug and L.logDebug(f'Looking for scheduled scripts') + _ts = utcDatetime() for each in self.findScripts(meta = _metaAt): try: - if cronMatchesTimestamp(at := each.meta.get(_metaAt)): + if cronMatchesTimestamp(at := each.meta.get(_metaAt), _ts): L.isDebug and L.logDebug(f'Running script: {each.scriptName} at: {at}') self.runScript(each) except ValueError as e: @@ -1831,6 +1980,8 @@ def runCB(pcontext:PContext, arguments:list[str]) -> None: L.logDebug(f'Script terminated with result: {pcontext.result}') if pcontext.state == PState.terminatedWithError: L.logWarn(f'Script terminated with error: {pcontext.error.message}') + if pcontext.error.exception: + L.logWarn(''.join(traceback.format_exception(pcontext.error.exception))) if not result or not cast(ACMEPContext, pcontext).nextScript: return @@ -1850,6 +2001,9 @@ def runCB(pcontext:PContext, arguments:list[str]) -> None: # pcontext.setError(PError.invalid, f'Script "{pcontext.name}" is already running') return False + # Set script timeout + pcontext.setMaxRuntime(self.maxRuntime) + # Set environemt environment['tui.theme'] = SSymbol(string = CSE.textUI.theme) pcontext.setEnvironment(environment) diff --git a/acme/services/SecurityManager.py b/acme/services/SecurityManager.py index 63eb090e..6ee438a7 100644 --- a/acme/services/SecurityManager.py +++ b/acme/services/SecurityManager.py @@ -27,8 +27,6 @@ from ..services.Logging import Logging as L -# TODO move configurations to extra functions and support reconfigure event - class SecurityManager(object): """ This manager entity handles access to resources and requests. """ @@ -47,6 +45,10 @@ class SecurityManager(object): 'usernameMqtt', 'passwordMqtt', 'allowedCredentialIDsMqtt', + 'httpBasicAuthFile', + 'httpTokenAuthFile', + 'httpBasicAuthData', + 'httpTokenAuthData' ) @@ -54,6 +56,11 @@ def __init__(self) -> None: # Get the configuration settings self._assignConfig() + self._readHttpBasicAuthFile() + self._readHttpTokenAuthFile() + + # Add a handler when the CSE is reset + CSE.event.addHandler(CSE.event.cseReset, self.restart) # type: ignore # Add handler for configuration updates CSE.event.addHandler(CSE.event.configUpdate, self.configUpdate) # type: ignore @@ -68,6 +75,15 @@ def __init__(self) -> None: def shutdown(self) -> bool: L.isInfo and L.log('SecurityManager shut down') return True + + + def restart(self, name:str) -> None: + """ Restart the Security manager service. + """ + self._assignConfig() + self._readHttpBasicAuthFile() + self._readHttpTokenAuthFile() + L.logDebug('SecurityManager restarted') def _assignConfig(self) -> None: @@ -92,13 +108,23 @@ def _assignConfig(self) -> None: self.passwordMqtt = Configuration.get('mqtt.security.password') self.allowedCredentialIDsMqtt = Configuration.get('mqtt.security.allowedCredentialIDs') + # HTTP authentication + self.httpBasicAuthFile = Configuration.get('http.security.basicAuthFile') + self.httpTokenAuthFile = Configuration.get('http.security.tokenAuthFile') + + def configUpdate(self, name:str, key:Optional[str] = None, value:Any = None) -> None: """ Handle configuration updates. + + Args: + name: The name of the configuration section. + key: The key of the configuration value. + value: The new value of the configuration value. 
""" - if key not in [ 'cse.security.enableACPChecks', + if key not in ( 'cse.security.enableACPChecks', 'cse.security.fullAccessAdmin', 'http.security.useTLS', 'http.security.verifyCertificate', @@ -111,10 +137,13 @@ def configUpdate(self, name:str, 'mqtt.security.username', 'mqtt.security.password', 'mqtt.security.allowedCredentialIDs', - ]: + 'http.security.basicAuthFile' + ): return self._assignConfig() - return + self._readHttpBasicAuthFile() + self._readHttpTokenAuthFile() + ############################################################################################### @@ -473,6 +502,79 @@ def getSSLContext(self) -> ssl.SSLContext: return context + ########################################################################## + # + # User authentication + # + + def validateHttpBasicAuth(self, username:str, password:str) -> bool: + """ Validate the provided username and password against the configured basic authentication file. + + Args: + username: The username to validate. + password: The password to validate. + + Return: + Boolean indicating the result. + """ + return self.httpBasicAuthData.get(username) == password + + + def validateHttpTokenAuth(self, token:str) -> bool: + """ Validate the provided token against the configured token authentication file. + + Args: + token: The token to validate. + + Return: + Boolean indicating the result. + """ + return token in self.httpTokenAuthData + + + def _readHttpBasicAuthFile(self) -> None: + """ Read the HTTP basic authentication file and store the data in a dictionary. + The authentication information is stored as username:password. + + The data is stored in the `httpBasicAuthData` dictionary. + """ + self.httpBasicAuthData = {} + # We need to access the configuration directly, since the http server is not yet initialized + if Configuration.get('http.security.enableBasicAuth') and self.httpBasicAuthFile: + try: + with open(self.httpBasicAuthFile, 'r') as f: + for line in f: + if line.startswith('#'): + continue + if len(line.strip()) == 0: + continue + (username, password) = line.strip().split(':') + self.httpBasicAuthData[username] = password.strip() + except Exception as e: + L.logErr(f'Error reading basic authentication file: {e}') + + + def _readHttpTokenAuthFile(self) -> None: + """ Read the HTTP token authentication file and store the data in a dictionary. + The authentication information is stored as a single token per line. + + The data is stored in the `httpTokenAuthData` list. + """ + self.httpTokenAuthData = [] + # We need to access the configuration directly, since the http server is not yet initialized + if Configuration.get('http.security.enableTokenAuth') and self.httpTokenAuthFile: + try: + with open(self.httpTokenAuthFile, 'r') as f: + for line in f: + if line.startswith('#'): + continue + if len(line.strip()) == 0: + continue + self.httpTokenAuthData.append(line.strip()) + except Exception as e: + L.logErr(f'Error reading token authentication file: {e}') + + # def getSSLContextMqtt(self) -> ssl.SSLContext: # """ Depending on the configuration whether to use TLS for MQTT, this method creates a new `SSLContext` # from the configured certificates and returns it. If TLS for MQTT is disabled then `None` is returned. 
diff --git a/acme/services/Statistics.py b/acme/services/Statistics.py index c4dfb6a7..2e825a81 100644 --- a/acme/services/Statistics.py +++ b/acme/services/Statistics.py @@ -355,7 +355,7 @@ def getChildren(res:Resource, level:int) -> str: result = '' if maxLevel > 0 and level == maxLevel: return result - chs = CSE.dispatcher.directChildResources(res.ri) + chs = CSE.dispatcher.retrieveDirectChildResources(res.ri) for ch in chs: result += ' ' * 2 * level + f'|_ {ch.rn} < {ResourceTypes(ch.ty).tpe()} >\n' result += getChildren(ch, level+1) diff --git a/acme/services/Storage.py b/acme/services/Storage.py index 4d916a5d..49a9c190 100644 --- a/acme/services/Storage.py +++ b/acme/services/Storage.py @@ -10,6 +10,14 @@ # """ This module defines storage managers and drivers for database access. + + Storage managers are used to store, retrieve and manage resources and other runtime data in the database. + + Storage drivers are used to access the database. Currently, the only supported database is TinyDB. + + See also: + - `TinyDBBetterTable` + - `TinyDBBufferedStorage` """ from __future__ import annotations @@ -32,29 +40,34 @@ from ..services import CSE from ..resources.Resource import Resource from ..resources.ACTR import ACTR +from ..resources.SCH import SCH from ..resources.Factory import resourceFromDict from ..services.Logging import Logging as L # Constants for database and table names _resources = 'resources' +""" Name of the resources table. """ _identifiers = 'identifiers' +""" Name of the identifiers table. """ _children = 'children' -_srn = 'srn' +""" Name of the children table. """ _subscriptions = 'subscriptions' +""" Name of the subscriptions table. """ _batchNotifications = 'batchNotifications' +""" Name of the batchNotifications table. """ _statistics = 'statistics' +""" Name of the statistics table. """ _actions = 'actions' +""" Name of the actions table. """ _requests = 'requests' +""" Name of the requests table. """ +_schedules = 'schedules' +""" Name of the schedules table. """ class Storage(object): """ This class implements the entry points to the CSE's underlying database functions. - - Attributes: - inMemory: Indicator whether the database is located in memory (volatile) or on disk. - dbPath: In case *inMemory* is "False" this attribute contains the path to a directory where the database is stored in disk. - dbReset: Indicator that the database should be reset or cleared during start-up. """ __slots__ = ( @@ -63,9 +76,13 @@ class Storage(object): 'dbReset', 'db', ) + """ Define slots for instance variables. """ def __init__(self) -> None: """ Initialization of the storage manager. + + Raises: + RuntimeError: In case of an error during initialization. """ # create data directory @@ -80,6 +97,7 @@ def __init__(self) -> None: # create DB object and open DB self.db = TinyDBBinding(self.dbPath, CSE.cseCsi[1:]) # add CSE CSI as postfix + """ The database object. """ # Reset dbs? if self.dbReset: @@ -113,8 +131,11 @@ def _assignConfig(self) -> None: """ Assign default configurations. """ self.inMemory = Configuration.get('database.inMemory') + """ Indicator whether the database is located in memory (volatile) or on disk. """ self.dbPath = Configuration.get('database.path') + """ In case *inMemory* is "False" this attribute contains the path to a directory where the database is stored in disk. """ self.dbReset = Configuration.get('database.resetOnStartup') + """ Indicator that the database should be reset or cleared during start-up. 
""" def purge(self) -> None: @@ -153,6 +174,9 @@ def _validateDB(self) -> bool: self.getStatistics() dbFile = _actions self.getActions() + dbFile = _schedules + self.getSchedules() + # TODO requests except Exception as e: @@ -185,10 +209,12 @@ def createResource(self, resource:Resource, overwrite:Optional[bool] = True) -> Args: resource: The resource to store in the database. overwrite: Indicator whether an existing resource shall be overwritten. + + Raises: + CONFLICT: In case the resource already exists and *overwrite* is "False". """ ri = resource.ri srn = resource.getSrn() - # L.logDebug(f'Adding resource (ty: {resource.ty}, ri: {resource.ri}, rn: {resource.rn}, srn: {srn}') if overwrite: L.isDebug and L.logDebug('Resource enforced overwrite') self.db.upsertResource(resource, ri) @@ -199,10 +225,10 @@ def createResource(self, resource:Resource, overwrite:Optional[bool] = True) -> raise CONFLICT(L.logWarn(f'Resource already exists (Skipping): {resource} ri: {ri} srn:{srn}')) # Add path to identifiers db - self.db.insertIdentifier(resource, ri, srn) + self.db.upsertIdentifier(resource, ri, srn) # Add record to childResources db - self.db.addChildResource(resource, ri) + self.db.upsertChildResource(resource, ri) def hasResource(self, ri:Optional[str] = None, srn:Optional[str] = None) -> bool: @@ -213,6 +239,7 @@ def hasResource(self, ri:Optional[str] = None, srn:Optional[str] = None) -> bool Args: ri: Optional resource ID. srn: Optional structured resource name. + Returns: True when a resource with the ID or name exists. """ @@ -232,8 +259,13 @@ def retrieveResource(self, ri:Optional[str] = None, csi: The resource is retrieved via its CSE-ID. srn: The resource is retrieved via its structured resource name. aei: The resource is retrieved via its AE-ID. + Returns: The resource. + + Raises: + NOT_FOUND: In case the resource does not exist. + INTENRAL_SERVER_ERROR: In case of a database inconsistency. """ resources = [] @@ -253,10 +285,11 @@ def retrieveResource(self, ri:Optional[str] = None, elif aei: # get an AE by its AE-ID resources = self.db.searchResources(aei = aei) - if (l := len(resources)) == 1: - return resourceFromDict(resources[0]) - elif l == 0: - raise NOT_FOUND('resource not found') + match len(resources): + case 1: + return resourceFromDict(resources[0]) + case 0: + raise NOT_FOUND('resource not found') raise INTERNAL_SERVER_ERROR('database inconsistency') @@ -266,14 +299,21 @@ def retrieveResourceRaw(self, ri:str) -> JSON: Args: ri: The resource is retrieved via its rersource ID. + Returns: The resource dictionary. + + Raises: + NOT_FOUND: In case the resource does not exist. + INTENRAL_SERVER_ERROR: In case of a database inconsistency. """ resources = self.db.searchResources(ri = ri) - if (l := len(resources)) == 1: - return resources[0] - elif l == 0: - raise NOT_FOUND('resource not found') + match len(resources): + case 1: + return resources[0] + case 0: + raise NOT_FOUND('resource not found') + raise INTERNAL_SERVER_ERROR('database inconsistency') @@ -282,6 +322,7 @@ def retrieveResourcesByType(self, ty:ResourceTypes) -> list[Document]: Args: ty: resource type to retrieve. + Returns: List of resource *Document* objects . """ @@ -294,6 +335,7 @@ def updateResource(self, resource:Resource) -> Resource: Args: resource: Resource to update. + Return: Updated Resource object. """ @@ -307,26 +349,29 @@ def deleteResource(self, resource:Resource) -> None: Args: resource: Resource to delete. + + Raises: + NOT_FOUND: In case the resource does not exist. 
""" # L.logDebug(f'Removing resource (ty: {resource.ty}, ri: {resource.ri}, rn: {resource.rn})') try: self.db.deleteResource(resource) self.db.deleteIdentifier(resource) self.db.removeChildResource(resource) - except KeyError as e: - L.isDebug and L.logDebug(f'Cannot remove: {resource.ri} (NOT_FOUND). Could be an expected error.') - raise NOT_FOUND(dbg = str(e)) + except KeyError: + raise NOT_FOUND(L.logDebug(f'Cannot remove: {resource.ri} (NOT_FOUND). Could be an expected error.')) def directChildResources(self, pi:str, - ty:Optional[ResourceTypes] = None, + ty:Optional[ResourceTypes|list[ResourceTypes]] = None, raw:Optional[bool] = False) -> list[Document]|list[Resource]: """ Return a list of direct child resources, or an empty list Args: pi: The parent resource's Resource ID. - ty: Optional resource type to filter the result. + ty: Optional resource type or list of resource types to filter the result. raw: When "True" then return the child resources as resource dictionary instead of resources. + Returns: Return a list of resources, or a list of raw resource dictionaries. """ @@ -337,12 +382,13 @@ def directChildResources(self, pi:str, def directChildResourcesRI(self, pi:str, - ty:Optional[ResourceTypes] = None) -> list[str]: + ty:Optional[ResourceTypes|list[ResourceTypes]] = None) -> list[str]: """ Return a list of direct child resource IDs, or an empty list Args: pi: The parent resource's Resource ID. - ty: Optional resource type to filter the result. + ty: Optional resource type or list of resource types to filter the result. + Returns: Return a list of resource IDs. """ @@ -355,6 +401,7 @@ def countDirectChildResources(self, pi:str, ty:Optional[ResourceTypes] = None) - Args: pi: The parent resource's Resource ID. ty: Optional resource type to filter the result. + Returns: The number of child resources. """ @@ -375,6 +422,7 @@ def identifier(self, ri:str) -> list[Document]: Args: ri: Unstructured resource ID for the mapping to look for. + Return: List of found resources identifier mappings, or an empty list. """ @@ -386,6 +434,7 @@ def structuredIdentifier(self, srn:str) -> list[Document]: Args: srn: Structured resource ID for the mapping to look for. + Return: List of found resources identifier mappings, or an empty list. """ @@ -398,6 +447,7 @@ def searchByFragment(self, dct:dict, filter:Optional[Callable[[JSON], bool]] = N Args: dct: A fragment dictionary to use as a filter for the search. filter: An optional callback to provide additional filter functionality. + Return: List of `Resource` objects. """ @@ -411,6 +461,7 @@ def searchByFilter(self, filter:Callable[[JSON], bool]) -> list[Resource]: Args: filter: A callback to provide filter functionality. + Return: List of `Resource` objects. """ @@ -425,6 +476,14 @@ def searchByFilter(self, filter:Callable[[JSON], bool]) -> list[Resource]: ## def getSubscription(self, ri:str) -> Optional[Document]: + """ Retrieve a subscription representation (not a oneM2M `Resource` object) from the DB. + + Args: + ri: The subscription's resource ID. + + Return: + The subscription as a dictionary, or None. + """ # L.logDebug(f'Retrieving subscription: {ri}') subs = self.db.searchSubscriptions(ri = ri) if not subs or len(subs) != 1: @@ -433,21 +492,59 @@ def getSubscription(self, ri:str) -> Optional[Document]: def getSubscriptionsForParent(self, pi:str) -> list[Document]: + """ Retrieve all subscriptions representations (not oneM2M `Resource` objects) for a parent resource. + + Args: + pi: The parent resource's resource ID. 
+ + Return: + List of subscriptions. + """ # L.logDebug(f'Retrieving subscriptions for parent: {pi}') return self.db.searchSubscriptions(pi = pi) def addSubscription(self, subscription:Resource) -> bool: + """ Add a subscription to the DB. + + Args: + subscription: The subscription `Resource` to add. + + Return: + Boolean value to indicate success or failure. + """ # L.logDebug(f'Adding subscription: {ri}') return self.db.upsertSubscription(subscription) def removeSubscription(self, subscription:Resource) -> bool: + """ Remove a subscription from the DB. + + Args: + subscription: The subscription `Resource` to remove. + + Return: + Boolean value to indicate success or failure. + + Raises: + NOT_FOUND: In case the subscription does not exist. + """ # L.logDebug(f'Removing subscription: {subscription.ri}') - return self.db.removeSubscription(subscription) + try: + return self.db.removeSubscription(subscription) + except KeyError: + raise NOT_FOUND(L.logDebug(f'Cannot remove subscription data for: {subscription.ri} (NOT_FOUND). Could be an expected error.')) def updateSubscription(self, subscription:Resource) -> bool: + """ Update a subscription representation in the DB. + + Args: + subscription: The subscription `Resource` to update. + + Return: + Boolean value to indicate success or failure. + """ # L.logDebug(f'Updating subscription: {ri}') return self.db.upsertSubscription(subscription) @@ -458,20 +555,56 @@ def updateSubscription(self, subscription:Resource) -> bool: ## def addBatchNotification(self, ri:str, nu:str, request:JSON) -> bool: + """ Add a batch notification to the DB. + + Args: + ri: The resource ID of the target resource. + nu: The notification URI. + request: The request to store. + + Return: + Boolean value to indicate success or failure. + """ return self.db.addBatchNotification(ri, nu, request) def countBatchNotifications(self, ri:str, nu:str) -> int: + """ Count the number of batch notifications for a target resource and a notification URI. + + Args: + ri: The resource ID of the target resource. + nu: The notification URI. + + Return: + The number of matching batch notifications. + """ return self.db.countBatchNotifications(ri, nu) def getBatchNotifications(self, ri:str, nu:str) -> list[Document]: + """ Retrieve the batch notifications for a target resource and a notification URI. + + Args: + ri: The resource ID of the target resource. + nu: The notification URI. + + Return: + List of batch notifications. + """ return self.db.getBatchNotifications(ri, nu) def removeBatchNotifications(self, ri:str, nu:str) -> bool: - return self.db.removeBatchNotifications(ri, nu) + """ Remove the batch notifications for a target resource and a notification URI. + Args: + ri: The resource ID of the target resource. + nu: The notification URI. + + Return: + Boolean value to indicate success or failure. + """ + return self.db.removeBatchNotifications(ri, nu) ######################################################################### @@ -481,53 +614,107 @@ def removeBatchNotifications(self, ri:str, nu:str) -> bool: def getStatistics(self) -> JSON: """ Retrieve the statistics data from the DB. + + Return: + The statistics data as a JSON dictionary. """ return self.db.searchStatistics() def updateStatistics(self, stats:JSON) -> bool: """ Update the statistics DB with new data. + + Args: + stats: The statistics data to store. + + Return: + Boolean value to indicate success or failure. """ return self.db.upsertStatistics(stats) def purgeStatistics(self) -> None: """ Purge the statistics DB. 
+ + Return: + Boolean value to indicate success or failure. """ self.db.purgeStatistics() - ######################################################################### ## ## Actions ## def getActions(self) -> list[Document]: - """ Retrieve the actions data from the DB. + """ Retrieve all action representations from the DB. + + Return: + List of *Documents*. May be empty. """ return self.db.searchActionReprs() def getAction(self, ri:str) -> Optional[Document]: - """ Retrieve the actions data from the DB. + """ Retrieve the actions representation from the DB. + + Args: + ri: The action's resource ID. + + Return: + The action's data as a *Document*, or None. """ return self.db.getAction(ri) def searchActionsForSubject(self, ri:str) -> Sequence[JSON]: + """ Search for actions for a subject resource. + + Args: + ri: The subject resource's resource ID. + + Return: + List of matching action representations. + """ return self.db.searchActionsDeprsForSubject(ri) def updateAction(self, action:ACTR, period:float, count:int) -> bool: + """ Update or add an action representation in the DB. + + Args: + action: The action to update or insert. + period: The period for the action. + count: The run count for the action. + + Return: + Boolean value to indicate success or failure. + """ return self.db.upsertActionRepr(action, period, count) def updateActionRepr(self, actionRepr:JSON) -> bool: + """ Update an action representation in the DB. + + Args: + actionRepr: The action representation to update. + + Return: + Boolean value to indicate success or failure. + """ return self.db.updateActionRepr(actionRepr) def removeAction(self, ri:str) -> bool: + """ Remove an action representation from the DB. + + Args: + ri: The action's resource ID. + + Return: + Boolean value to indicate success or failure. + """ return self.db.removeActionRepr(ri) @@ -567,6 +754,7 @@ def getRequests(self, ri:Optional[str] = None, sortedByOt:bool = False) -> list[ Args: ri: The target resource's resource ID. If *None* or empty, then all requests are returned + sortedByOt: If true, then the requests are sorted by their creation time. Return: List of *Documents*. May be empty. @@ -586,6 +774,57 @@ def deleteRequests(self, ri:Optional[str] = None) -> None: return self.db.deleteRequests(ri) + ######################################################################### + ## + ## Schedules + ## + + def getSchedules(self) -> list[Document]: + """ Retrieve the schedules data from the DB. + + Return: + List of *Documents*. May be empty. + """ + return self.db.getSchedules() + + + def searchScheduleForTarget(self, pi:str) -> list[str]: + """ Search for schedules for a target resource. + + Args: + pi: The target resource's resource ID. + + Return: + List of schedule resource IDs. + """ + result = [] + for s in self.db.searchSchedules(pi): + result.extend(s['sce']) + return result + + + def upsertSchedule(self, schedule:SCH) -> bool: + """ Add or update a schedule in the DB. + + Args: + schedule: The schedule to add or update. + + Return: + Boolean value to indicate success or failure. + """ + return self.db.upsertSchedule(schedule.ri, schedule.pi, schedule.attribute('se/sce')) + + + def removeSchedule(self, schedule:SCH) -> bool: + """ Remove a schedule from the DB. + + Args: + schedule: The schedule to remove. + + Return: + Boolean value to indicate success or failure. 
+ """ + return self.db.removeSchedule(schedule.ri) ######################################################################### # @@ -595,6 +834,8 @@ def deleteRequests(self, ri:Optional[str] = None) -> None: class TinyDBBinding(object): + """ This class implements the TinyDB binding to the database. It is used by the Storage class. + """ __slots__ = ( 'path', @@ -611,6 +852,7 @@ class TinyDBBinding(object): 'lockStatistics', 'lockActions', 'lockRequests', + 'lockSchedules', 'fileResources', 'fileIdentifiers', @@ -619,6 +861,7 @@ class TinyDBBinding(object): 'fileStatistics', 'fileActions', 'fileRequests', + 'fileSchedules', 'dbResources', 'dbIdentifiers', @@ -627,6 +870,7 @@ class TinyDBBinding(object): 'dbStatistics', 'dbActions', 'dbRequests', + 'dbSchedules', 'tabResources', 'tabIdentifiers', @@ -637,6 +881,7 @@ class TinyDBBinding(object): 'tabStatistics', 'tabActions', 'tabRequests', + 'tabSchedules', 'resourceQuery', 'identifierQuery', @@ -644,109 +889,177 @@ class TinyDBBinding(object): 'batchNotificationQuery', 'actionsQuery', 'requestsQuery', + 'schedulesQuery', ) + """ Define slots for instance variables. """ def __init__(self, path:str, postfix:str) -> None: + """ Initialize the TinyDB binding. + + Args: + path: Path to the database directory. + postfix: Postfix for the database file names. + """ + self.path = path + """ Path to the database directory. """ self._assignConfig() + """ Assign configuration values. """ L.isInfo and L.log(f'Cache Size: {self.cacheSize:d}') # create transaction locks self.lockResources = Lock() + """ Lock for the resources table.""" self.lockIdentifiers = Lock() + """ Lock for the identifiers table.""" self.lockChildResources = Lock() + """ Lock for the childResources table.""" self.lockStructuredIDs = Lock() + """ Lock for the structuredIDs table.""" self.lockSubscriptions = Lock() + """ Lock for the subscriptions table.""" self.lockBatchNotifications = Lock() + """ Lock for the batchNotifications table.""" self.lockStatistics = Lock() + """ Lock for the statistics table.""" self.lockActions = Lock() + """ Lock for the actions table.""" self.lockRequests = Lock() + """ Lock for the requests table.""" + self.lockSchedules = Lock() + """ Lock for the schedules table.""" # file names self.fileResources = f'{self.path}/{_resources}-{postfix}.json' + """ Filename for the resources table.""" self.fileIdentifiers = f'{self.path}/{_identifiers}-{postfix}.json' + """ Filename for the identifiers table.""" self.fileSubscriptions = f'{self.path}/{_subscriptions}-{postfix}.json' + """ Filename for the subscriptions table.""" self.fileBatchNotifications = f'{self.path}/{_batchNotifications}-{postfix}.json' + """ Filename for the batchNotifications table.""" self.fileStatistics = f'{self.path}/{_statistics}-{postfix}.json' + """ Filename for the statistics table.""" self.fileActions = f'{self.path}/{_actions}-{postfix}.json' + """ Filename for the actions table.""" self.fileRequests = f'{self.path}/{_requests}-{postfix}.json' + """ Filename for the requests table.""" + self.fileSchedules = f'{self.path}/{_schedules}-{postfix}.json' + """ Filename for the schedules table.""" # All databases/tables will use the smart query cache if Configuration.get('database.inMemory'): L.isInfo and L.log('DB in memory') self.dbResources = TinyDB(storage = MemoryStorage) + """ The TinyDB database for the resources table.""" self.dbIdentifiers = TinyDB(storage = MemoryStorage) + """ The TinyDB database for the identifiers table.""" self.dbSubscriptions = TinyDB(storage = 
MemoryStorage) + """ The TinyDB database for the subscriptions table.""" self.dbBatchNotifications = TinyDB(storage = MemoryStorage) + """ The TinyDB database for the batchNotifications table.""" self.dbStatistics = TinyDB(storage = MemoryStorage) + """ The TinyDB database for the statistics table.""" self.dbActions = TinyDB(storage = MemoryStorage) + """ The TinyDB database for the actions table.""" self.dbRequests = TinyDB(storage = MemoryStorage) + """ The TinyDB database for the requests table.""" + self.dbSchedules = TinyDB(storage = MemoryStorage) + """ The TinyDB database for the schedules table.""" else: L.isInfo and L.log('DB in file system') - # self.dbResources = TinyDB(self.fileResources) - # self.dbIdentifiers = TinyDB(self.fileIdentifiers) - # self.dbSubscriptions = TinyDB(self.fileSubscriptions) - # self.dbBatchNotifications = TinyDB(self.fileBatchNotifications) - # self.dbStatistics = TinyDB(self.fileStatistics) - # self.dbActions = TinyDB(self.fileActions) - - # EXPERIMENTAL Using TinyDBBufferedStorage - Buffers read and writes to disk self.dbResources = TinyDB(self.fileResources, storage = TinyDBBufferedStorage, write_delay = self.writeDelay) + """ The TinyDB database for the resources table.""" self.dbIdentifiers = TinyDB(self.fileIdentifiers, storage = TinyDBBufferedStorage, write_delay = self.writeDelay) + """ The TinyDB database for the identifiers table.""" self.dbSubscriptions = TinyDB(self.fileSubscriptions, storage = TinyDBBufferedStorage, write_delay = self.writeDelay) + """ The TinyDB database for the subscriptions table.""" self.dbBatchNotifications = TinyDB(self.fileBatchNotifications, storage = TinyDBBufferedStorage, write_delay = self.writeDelay) + """ The TinyDB database for the batchNotifications table.""" self.dbStatistics = TinyDB(self.fileStatistics, storage = TinyDBBufferedStorage, write_delay = self.writeDelay) + """ The TinyDB database for the statistics table.""" self.dbActions = TinyDB(self.fileActions, storage = TinyDBBufferedStorage, write_delay = self.writeDelay) + """ The TinyDB database for the actions table.""" self.dbRequests = TinyDB(self.fileRequests, storage = TinyDBBufferedStorage, write_delay = self.writeDelay) + """ The TinyDB database for the requests table.""" + self.dbSchedules = TinyDB(self.fileSchedules, storage = TinyDBBufferedStorage, write_delay = self.writeDelay) + """ The TinyDB database for the schedules table.""" # Open/Create tables self.tabResources = self.dbResources.table(_resources, cache_size = self.cacheSize) + """ The TinyDB table for the resources table.""" TinyDBBetterTable.assign(self.tabResources) self.tabIdentifiers = self.dbIdentifiers.table(_identifiers, cache_size = self.cacheSize) + """ The TinyDB table for the identifiers table.""" TinyDBBetterTable.assign(self.tabIdentifiers) self.tabChildResources = self.dbIdentifiers.table(_children, cache_size = self.cacheSize) + """ The TinyDB table for the childResources table.""" TinyDBBetterTable.assign(self.tabChildResources) self.tabStructuredIDs = self.dbIdentifiers.table('srn', cache_size = self.cacheSize) + """ The TinyDB table for the structuredIDs table.""" TinyDBBetterTable.assign(self.tabStructuredIDs) self.tabSubscriptions = self.dbSubscriptions.table(_subscriptions, cache_size = self.cacheSize) + """ The TinyDB table for the subscriptions table.""" TinyDBBetterTable.assign(self.tabSubscriptions) self.tabBatchNotifications = self.dbBatchNotifications.table(_batchNotifications, cache_size = self.cacheSize) + """ The TinyDB table for the batchNotifications 
table.""" TinyDBBetterTable.assign(self.tabBatchNotifications) self.tabStatistics = self.dbStatistics.table(_statistics, cache_size = self.cacheSize) + """ The TinyDB table for the statistics table.""" TinyDBBetterTable.assign(self.tabStatistics) self.tabActions = self.dbActions.table(_actions, cache_size = self.cacheSize) + """ The TinyDB table for the actions table.""" TinyDBBetterTable.assign(self.tabActions) self.tabRequests = self.dbRequests.table(_requests, cache_size = self.cacheSize) + """ The TinyDB table for the requests table.""" TinyDBBetterTable.assign(self.tabRequests) + self.tabSchedules = self.dbSchedules.table(_schedules, cache_size = self.cacheSize) + """ The TinyDB table for the schedules table.""" + TinyDBBetterTable.assign(self.tabSchedules) + + # Create the Queries self.resourceQuery = Query() + """ The TinyDB query object for the resources table.""" self.identifierQuery = Query() + """ The TinyDB query object for the identifiers table.""" self.subscriptionQuery = Query() + """ The TinyDB query object for the subscriptions table.""" self.batchNotificationQuery = Query() + """ The TinyDB query object for the batchNotifications table.""" self.actionsQuery = Query() + """ The TinyDB query object for the actions table.""" self.requestsQuery = Query() + """ The TinyDB query object for the requests table.""" + self.schedulesQuery = Query() + """ The TinyDB query object for the schedules table.""" def _assignConfig(self) -> None: """ Assign default configurations. """ self.cacheSize = Configuration.get('database.cacheSize') + """ Size of the cache for the TinyDB tables. """ self.writeDelay = Configuration.get('database.writeDelay') + """ Delay for writing to the database. """ self.maxRequests = Configuration.get('cse.operation.requests.size') + """ Maximum number of oneM2M recorded requests to keep in the database. """ def closeDB(self) -> None: + """ Close the database. + """ L.isInfo and L.log('Closing DBs') with self.lockResources: self.dbResources.close() @@ -762,9 +1075,13 @@ def closeDB(self) -> None: self.dbActions.close() with self.lockRequests: self.dbRequests.close() + with self.lockSchedules: + self.dbSchedules.close() def purgeDB(self) -> None: + """ Purge the database. + """ L.isInfo and L.log('Purging DBs') self.tabResources.truncate() self.tabIdentifiers.truncate() @@ -775,16 +1092,27 @@ def purgeDB(self) -> None: self.tabStatistics.truncate() self.tabActions.truncate() self.tabRequests.truncate() + self.tabSchedules.truncate() def backupDB(self, dir:str) -> bool: + """ Backup the database to a directory. + + Args: + dir: The directory to backup to. + + Return: + Boolean value to indicate success or failure. + """ for fn in [ self.fileResources, self.fileIdentifiers, self.fileSubscriptions, self.fileBatchNotifications, self.fileStatistics, self.fileActions, - self.fileRequests]: + self.fileRequests, + self.fileSchedules + ]: if Path(fn).is_file(): shutil.copy2(fn, dir) return True @@ -796,40 +1124,58 @@ def backupDB(self, dir:str) -> bool: def insertResource(self, resource: Resource, ri:str) -> None: + """ Insert a resource into the database. + + Args: + resource: The resource to insert. + ri: The resource ID of the resource. + """ with self.lockResources: self.tabResources.insert(Document(resource.dict, ri)) # type:ignore[arg-type] - # self.tabResources.insert(resource.dict) def upsertResource(self, resource: Resource, ri:str) -> None: + """ Update or insert a resource into the database. + + Args: + resource: The resource to upate or insert. 
+ ri: The resource ID of the resource. + """ #L.logDebug(resource) with self.lockResources: # Update existing or insert new when overwriting - # _ri = resource.ri - # self.tabResources.upsert(resource.dict, self.resourceQuery.ri == _ri) - self.tabResources.upsert(Document(resource.dict, doc_id = ri)) # type:ignore[arg-type] def updateResource(self, resource: Resource, ri:str) -> Resource: + """ Update a resource in the database. Only the fields that are not None will be updated. + + Args: + resource: The resource to update. + ri: The resource ID of the resource. + + Return: + The updated resource. + """ #L.logDebug(resource) with self.lockResources: self.tabResources.update(resource.dict, doc_ids = [ri]) # type:ignore[call-arg, list-item] - # self.tabResources.update(resource.dict, self.resourceQuery.ri == _ri) # remove nullified fields from db and resource for k in list(resource.dict): if resource.dict[k] is None: # only remove the real None attributes, not those with 0 self.tabResources.update(delete(k), doc_ids = [ri]) # type: ignore[no-untyped-call, call-arg, list-item] - # self.tabResources.update(delete(k), self.resourceQuery.ri == ri) # type: ignore [no-untyped-call] del resource.dict[k] return resource def deleteResource(self, resource:Resource) -> None: + """ Delete a resource from the database. + + Args: + resource: The resource to delete. + """ with self.lockResources: - _ri = resource.ri - self.tabResources.remove(doc_ids = [_ri]) - # self.tabResources.remove(self.resourceQuery.ri == _ri) + self.tabResources.remove(doc_ids = [resource.ri]) def searchResources(self, ri:Optional[str] = None, @@ -838,12 +1184,28 @@ def searchResources(self, ri:Optional[str] = None, pi:Optional[str] = None, ty:Optional[int] = None, aei:Optional[str] = None) -> list[Document]: + """ Search for resources by structured resource name, resource ID, CSE-ID, parent resource ID, resource type, + or application entity ID. + + Only one of the parameters may be used at a time. The order of precedence is: structured resource name, + resource ID, CSE-ID, structured resource name, parent resource ID, resource type, application entity ID. + + Args: + ri: A resource ID. + csi: A CSE ID. + srn: A structured resource name. + pi: A parent resource ID. + ty: A resource type. + aei: An application entity ID. + + Return: + A list of found resources, or an empty list. + """ if not srn: with self.lockResources: if ri: _r = self.tabResources.get(doc_id = ri) # type:ignore[arg-type] - return [_r] if _r else [] - # return self.tabResources.search(self.resourceQuery.ri == ri) + return [_r] if _r else [] # type:ignore[list-item] elif csi: return self.tabResources.search(self.resourceQuery.csi == csi) elif pi: @@ -864,6 +1226,14 @@ def searchResources(self, ri:Optional[str] = None, def discoverResourcesByFilter(self, func:Callable[[JSON], bool]) -> list[Document]: + """ Search for resources by a filter function. + + Args: + func: The filter function to use. + + Return: + A list of found resource documents, or an empty list. + """ with self.lockResources: return self.tabResources.search(func) # type: ignore [arg-type] @@ -872,11 +1242,24 @@ def hasResource(self, ri:Optional[str] = None, csi:Optional[str] = None, srn:Optional[str] = None, ty:Optional[int] = None) -> bool: + """ Check if a resource exists in the database. + + Only one of the parameters may be used at a time. The order of precedence is: structured resource name, + resource ID, CSE-ID, resource type. + + Args: + ri: A resource ID. + csi: A CSE ID. 
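# A minimal sketch of the doc_id-keyed TinyDB pattern used above. Stock TinyDB uses integer
# document IDs; in the methods above the project's own TinyDBBetterTable helper is assigned to
# each table, which is presumably what allows plain resource-ID strings as document IDs. The
# sketch sticks to an integer ID and to tinydb 4.x APIs so it runs on its own; the table and
# field names are made up for the example.
from tinydb import TinyDB
from tinydb.operations import delete
from tinydb.storages import MemoryStorage
from tinydb.table import Document

db = TinyDB(storage = MemoryStorage)
resources = db.table('resources', cache_size = 100)

# Upsert with an explicit document ID, as upsertResource() does with the resource ID.
resources.upsert(Document({'ri': 'cnt001', 'ty': 3, 'lbl': None}, doc_id = 1))

# Update by document ID, then strip attributes that were set to None, as updateResource() does.
resources.update(delete('lbl'), doc_ids = [1])

# Remove by document ID, as deleteResource() does.
resources.remove(doc_ids = [1])
# (end of sketch)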
+ srn: A structured resource name. + ty: A resource type. + + Return: + True if the resource exists, False otherwise. + """ if not srn: with self.lockResources: if ri: return self.tabResources.contains(doc_id = ri) # type:ignore[arg-type] - # return self.tabResources.contains(self.resourceQuery.ri == ri) elif csi : return self.tabResources.contains(self.resourceQuery.csi == csi) elif ty is not None: # ty is an int @@ -889,12 +1272,24 @@ def hasResource(self, ri:Optional[str] = None, def countResources(self) -> int: + """ Return the number of resources in the database. + + Return: + The number of resources in the database. + """ with self.lockResources: return len(self.tabResources) def searchByFragment(self, dct:dict) -> list[Document]: - """ Search and return all resources that match the given dictionary/document. """ + """ Search and return all resources that match the given dictionary/document. + + Args: + dct: The dictionary/document to search for. + + Return: + A list of found resources, or an empty list. + """ with self.lockResources: return self.tabResources.search(self.resourceQuery.fragment(dct)) @@ -902,7 +1297,14 @@ def searchByFragment(self, dct:dict) -> list[Document]: # Identifiers, Structured RI, Child Resources # - def insertIdentifier(self, resource:Resource, ri:str, srn:str) -> None: + def upsertIdentifier(self, resource:Resource, ri:str, srn:str) -> None: + """ Insert or update an identifier into the identifiers DB. + + Args: + resource: The resource to insert. + ri: The resource ID of the resource. + srn: The structured resource name of the resource. + """ # L.isDebug and L.logDebug({'ri' : ri, 'rn' : resource.rn, 'srn' : srn, 'ty' : resource.ty}) with self.lockIdentifiers: self.tabIdentifiers.upsert(Document( @@ -912,14 +1314,6 @@ def insertIdentifier(self, resource:Resource, ri:str, srn:str) -> None: 'ty' : resource.ty }, ri)) # type:ignore[arg-type] - # self.tabIdentifiers.upsert( - # { 'ri' : ri, - # 'rn' : resource.rn, - # 'srn' : srn, - # 'ty' : resource.ty - # }, - # self.identifierQuery.ri == ri) - with self.lockStructuredIDs: self.tabStructuredIDs.upsert( Document({'srn': srn, @@ -928,9 +1322,13 @@ def insertIdentifier(self, resource:Resource, ri:str, srn:str) -> None: def deleteIdentifier(self, resource:Resource) -> None: + """ Delete an identifier from the identifiers DB. + + Args: + resource: The resource for which to delete the identifier. + """ with self.lockIdentifiers: self.tabIdentifiers.remove(doc_ids = [resource.ri]) - # self.tabIdentifiers.remove(self.identifierQuery.ri == resource.ri) with self.lockStructuredIDs: self.tabStructuredIDs.remove(doc_ids = [resource.getSrn()]) # type:ignore[arg-type,list-item] @@ -947,24 +1345,29 @@ def searchIdentifiers(self, ri:Optional[str] = None, ri: Resource ID to search for. srn: Structured path to search for. Return: - A list of found identifier documents (see `insertIdentifier`), or an empty list if not found. + A list of found identifier documents (see `upsertIdentifier`), or an empty list if not found. 
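# A minimal sketch of the two-way bookkeeping that upsertIdentifier() and searchIdentifiers()
# implement above: one record keyed by the resource ID (ri) and one keyed by the structured
# resource name (srn), so a lookup by srn first resolves the ri and then fetches the identifier
# record. Plain dicts stand in for the two TinyDB tables; the example values are made up.
from typing import Optional

identifiersSketch:dict = {}		# ri  -> {'ri', 'rn', 'srn', 'ty'}
structuredIDsSketch:dict = {}	# srn -> {'srn', 'ri'}

def upsertIdentifierSketch(ri:str, rn:str, srn:str, ty:int) -> None:
	identifiersSketch[ri] = {'ri': ri, 'rn': rn, 'srn': srn, 'ty': ty}
	structuredIDsSketch[srn] = {'srn': srn, 'ri': ri}

def searchIdentifiersSketch(ri:Optional[str] = None, srn:Optional[str] = None) -> list:
	if srn:	# resolve srn -> ri first, as the method above does
		ri = structuredIDsSketch.get(srn, {}).get('ri')
	return [ identifiersSketch[ri] ] if ri in identifiersSketch else []

upsertIdentifierSketch('cnt001', 'myContainer', 'cse-in/myAE/myContainer', 3)
assert searchIdentifiersSketch(srn = 'cse-in/myAE/myContainer')[0]['ri'] == 'cnt001'
# (end of sketch)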
""" + _r:Document if srn: - if (_r := self.tabStructuredIDs.get(doc_id = srn)): # type:ignore[arg-type] - ri = _r['ri'] if _r else None + if (_r := self.tabStructuredIDs.get(doc_id = srn)): # type:ignore[arg-type, assignment] + ri = _r['ri'] if _r else None else: return [] - # return self.tabIdentifiers.search(self.identifierQuery.srn == srn) if ri: with self.lockIdentifiers: - _r = self.tabIdentifiers.get(doc_id = ri) # type:ignore[arg-type] + _r = self.tabIdentifiers.get(doc_id = ri) # type:ignore[arg-type, assignment] return [_r] if _r else [] - # return self.tabIdentifiers.search(self.identifierQuery.ri == ri) return [] - def addChildResource(self, resource:Resource, ri:str) -> None: + def upsertChildResource(self, resource:Resource, ri:str) -> None: + """ Add a child resource to the childResources DB. + + Args: + resource: The resource to add as a child. + ri: The resource ID of the resource. + """ # L.isDebug and L.logDebug(f'insertChildResource ri:{ri}') pi = resource.pi @@ -979,15 +1382,21 @@ def addChildResource(self, resource:Resource, ri:str) -> None: # Then add the child ri to the parent's record if pi: # ATN: CSE has no parent - _r = self.tabChildResources.get(doc_id = pi) # type:ignore[arg-type] + _r:Document + _r = self.tabChildResources.get(doc_id = pi) # type:ignore[arg-type, assignment] _ch = _r['ch'] if ri not in _ch: _ch.append( [ri, ty] ) _r['ch'] = _ch - self.tabChildResources.update(_r, doc_ids = [pi])# type:ignore[arg-type, list-item] + self.tabChildResources.update(_r, doc_ids = [pi]) # type:ignore[arg-type, list-item] def removeChildResource(self, resource:Resource) -> None: + """ Remove a child resource from the childResources DB. + + Args: + resource: The resource to remove as a child. + """ ri = resource.ri pi = resource.pi @@ -998,7 +1407,7 @@ def removeChildResource(self, resource:Resource) -> None: self.tabChildResources.remove(doc_ids = [ri]) # type:ignore[arg-type, list-item] # Remove (ri, ty) tuple from parent record - _r = self.tabChildResources.get(doc_id = pi) # type:ignore[arg-type] + _r:Document = self.tabChildResources.get(doc_id = pi) # type:ignore[arg-type, assignment] _t = [ri, resource.ty] _ch = _r['ch'] if _t in _ch: @@ -1008,12 +1417,24 @@ def removeChildResource(self, resource:Resource) -> None: self.tabChildResources.update(_r, doc_ids = [pi]) # type:ignore[arg-type, list-item] - def searchChildResourcesByParentRI(self, pi:str, ty:Optional[int] = None) -> Optional[list[str]]: - _r = self.tabChildResources.get(doc_id = pi) #type:ignore[arg-type] + def searchChildResourcesByParentRI(self, pi:str, ty:Optional[ResourceTypes|list[ResourceTypes]] = None) -> list[str]: + """ Search for child resources by parent resource ID. + + Args: + pi: The parent resource ID. + ty: The resource type of the child resources to search for, or a list of resource types. + + Return: + A list of child resource IDs, or an empty list if not found. 
+ """ + # First convert ty to a list if it is just an int + if isinstance(ty, int): + ty = [ty] + _r:Document = self.tabChildResources.get(doc_id = pi) #type:ignore[arg-type, assignment] if _r: if ty is None: # optimization: only check ty once for None return [ c[0] for c in _r['ch'] ] - return [ c[0] for c in _r['ch'] if ty == c[1] ] # c is a tuple (ri, ty) + return [ c[0] for c in _r['ch'] if c[1] in ty] # c is a tuple (ri, ty) return [] # @@ -1023,17 +1444,35 @@ def searchChildResourcesByParentRI(self, pi:str, ty:Optional[int] = None) -> Opt def searchSubscriptions(self, ri:Optional[str] = None, pi:Optional[str] = None) -> Optional[list[Document]]: + """ Search for subscription representations by resource ID or parent resource ID. + + Only one of the parameters may be used at a time. The order of precedence is: resource ID, parent resource ID. + + Args: + ri: A resource ID. + pi: A parent resource ID. + + Return: + A list of found subscription representations, or None. + """ with self.lockSubscriptions: if ri: - _r = self.tabSubscriptions.get(doc_id = ri) # type:ignore[arg-type] + _r:Document = self.tabSubscriptions.get(doc_id = ri) # type:ignore[arg-type, assignment] return [_r] if _r else [] - # return self.tabSubscriptions.search(self.subscriptionQuery.ri == ri) if pi: return self.tabSubscriptions.search(self.subscriptionQuery.pi == pi) return None def upsertSubscription(self, subscription:Resource) -> bool: + """ Update or insert a subscription representation into the database. + + Args: + subscription: The `SUB` (subscription) to update or insert. + + Return: + True if the subscription representation was updated or inserted, False otherwise. + """ with self.lockSubscriptions: ri = subscription.ri return self.tabSubscriptions.upsert( @@ -1048,6 +1487,7 @@ def upsertSubscription(self, subscription:Resource) -> bool: 'nus' : subscription.nu, 'bn' : subscription.bn, 'cr' : subscription.cr, + 'nec' : subscription.nec, 'org' : subscription.getOriginator(), 'ma' : fromDuration(subscription.ma) if subscription.ma else None, # EXPERIMENTAL ma = maxAge 'nse' : subscription.nse @@ -1056,6 +1496,14 @@ def upsertSubscription(self, subscription:Resource) -> bool: def removeSubscription(self, subscription:Resource) -> bool: + """ Remove a subscription representation from the database. + + Args: + subscription: The `SUB` (subscription) to remove. + + Return: + True if the subscription representation was removed, False otherwise. + """ with self.lockSubscriptions: return len(self.tabSubscriptions.remove(doc_ids = [subscription.ri])) > 0 # return len(self.tabSubscriptions.remove(self.subscriptionQuery.ri == _ri)) > 0 @@ -1066,6 +1514,16 @@ def removeSubscription(self, subscription:Resource) -> bool: # def addBatchNotification(self, ri:str, nu:str, notificationRequest:JSON) -> bool: + """ Add a batch notification to the database. + + Args: + ri: The resource ID of the resource. + nu: The notification URI. + notificationRequest: The notification request. + + Return: + True if the batch notification was added, False otherwise. + """ with self.lockBatchNotifications: return self.tabBatchNotifications.insert( { 'ri' : ri, @@ -1076,16 +1534,43 @@ def addBatchNotification(self, ri:str, nu:str, notificationRequest:JSON) -> bool def countBatchNotifications(self, ri:str, nu:str) -> int: + """ Return the number of batch notifications for a resource and notification URI. + + Args: + ri: The resource ID of the resource. + nu: The notification URI. 
+ + Return: + The number of batch notifications for the resource and notification URI. + """ with self.lockBatchNotifications: return self.tabBatchNotifications.count((self.batchNotificationQuery.ri == ri) & (self.batchNotificationQuery.nu == nu)) def getBatchNotifications(self, ri:str, nu:str) -> list[Document]: + """ Return the batch notifications for a resource and notification URI. + + Args: + ri: The resource ID of the resource. + nu: The notification URI. + + Return: + A list of batch notifications for the resource and notification URI. + """ with self.lockBatchNotifications: return self.tabBatchNotifications.search((self.batchNotificationQuery.ri == ri) & (self.batchNotificationQuery.nu == nu)) def removeBatchNotifications(self, ri:str, nu:str) -> bool: + """ Remove the batch notifications for a resource and notification URI. + + Args: + ri: The resource ID of the resource. + nu: The notification URI. + + Return: + True if the batch notifications were removed, False otherwise. + """ with self.lockBatchNotifications: return len(self.tabBatchNotifications.remove((self.batchNotificationQuery.ri == ri) & (self.batchNotificationQuery.nu == nu))) > 0 @@ -1095,6 +1580,11 @@ def removeBatchNotifications(self, ri:str, nu:str) -> bool: # def searchStatistics(self) -> JSON: + """ Search for statistics. + + Return: + The statistics, or None if not found. + """ with self.lockStatistics: stats = self.tabStatistics.all() # stats = self.tabStatistics.get(doc_id = 1) @@ -1103,6 +1593,14 @@ def searchStatistics(self) -> JSON: def upsertStatistics(self, stats:JSON) -> bool: + """ Update or insert statistics. + + Args: + stats: The statistics to update or insert. + + Return: + True if the statistics were updated or inserted, False otherwise. + """ with self.lockStatistics: if len(self.tabStatistics) > 0: doc_id = self.tabStatistics.all()[0].doc_id @@ -1124,23 +1622,53 @@ def purgeStatistics(self) -> None: # def searchActionReprs(self) -> list[Document]: + """ Search for action representations. + + Return: + A list of action representations, or None if not found. + """ with self.lockActions: actions = self.tabActions.all() return actions if actions else None def getAction(self, ri:str) -> Optional[Document]: + """ Get an action representation by resource ID. + + Args: + ri: The resource ID of the action representation. + + Return: + The action representation, or None if not found. + """ with self.lockActions: - return self.tabActions.get(doc_id = ri) # type:ignore[arg-type] + return self.tabActions.get(doc_id = ri) # type:ignore[arg-type, return-value] def searchActionsDeprsForSubject(self, ri:str) -> Sequence[JSON]: + """ Search for action representations by subject. + + Args: + ri: The resource ID of the action representation's subject. + + Return: + A list of action representations, or None if not found. + """ with self.lockActions: return self.tabActions.search(self.actionsQuery.subject == ri) - # TODO add only? def upsertActionRepr(self, action:ACTR, periodTS:float, count:int) -> bool: + """ Update or insert an action representation. + + Args: + action: The action representation to update or insert. + periodTS: The timestamp for periodic execution. + count: The number of times the action will be executed. + + Return: + True if the action representation was updated or inserted, False otherwise. 
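# A minimal, self-contained sketch of the combined-Query lookups used by the batch notification
# helpers above, where entries are matched on both the subscription's resource ID and the
# notification URI. The '&' conjunction is standard tinydb Query syntax; the values are made up.
from tinydb import TinyDB, Query
from tinydb.storages import MemoryStorage

batchNotificationsSketch = TinyDB(storage = MemoryStorage).table('batchNotifications')
q = Query()

batchNotificationsSketch.insert({'ri': 'sub001', 'nu': 'http://example.com/notify', 'request': {}})
batchNotificationsSketch.insert({'ri': 'sub001', 'nu': 'http://example.com/other',  'request': {}})

# countBatchNotifications() / getBatchNotifications() style lookups:
assert batchNotificationsSketch.count((q.ri == 'sub001') & (q.nu == 'http://example.com/notify')) == 1
assert len(batchNotificationsSketch.search((q.ri == 'sub001') & (q.nu == 'http://example.com/notify'))) == 1

# removeBatchNotifications() style removal:
assert len(batchNotificationsSketch.remove((q.ri == 'sub001') & (q.nu == 'http://example.com/notify'))) == 1
# (end of sketch)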
+ """ with self.lockActions: _ri = action.ri _sri = action.sri @@ -1158,11 +1686,27 @@ def upsertActionRepr(self, action:ACTR, periodTS:float, count:int) -> bool: def updateActionRepr(self, actionRepr:JSON) -> bool: + """ Update an action representation. + + Args: + actionRepr: The action representation to update. + + Return: + True if the action representation was updated, False otherwise. + """ with self.lockActions: return self.tabActions.update(actionRepr, doc_ids = [actionRepr['ri']]) is not None # type:ignore[arg-type] def removeActionRepr(self, ri:str) -> bool: + """ Remove an action representation. + + Args: + ri: The action's resource ID. + + Return: + True if the action representation was removed, False otherwise. + """ with self.lockActions: if self.tabActions.get(doc_id = ri): # type:ignore[arg-type] return len(self.tabActions.remove(doc_ids = [ri])) > 0 # type:ignore[arg-type, list-item] @@ -1227,17 +1771,6 @@ def insertRequest(self, op:Operation, Document({k: v for k, v in _doc.items() if v is not None}, self.tabRequests.document_id_class(ts))) # type:ignore[arg-type] - # self.tabRequests.insert( - # Document({'ri': ri, - # 'srn': srn, - # 'ts': ts, - # 'org': originator, - # 'op': op, - # 'rsc': rsc, - # 'out': outgoing, - # 'req': request, - # 'rsp': response - # }, self.tabRequests.document_id_class(ts))) # type:ignore[arg-type] except Exception as e: L.logErr(f'Exception inserting request/response for ri: {ri}', exc = e) return False @@ -1270,4 +1803,75 @@ def deleteRequests(self, ri:Optional[str] = None) -> None: self.tabRequests.remove(self.requestsQuery.ri == ri) else: with self.lockRequests: - self.tabRequests.truncate() \ No newline at end of file + self.tabRequests.truncate() + + # + # Schedules + # + + def getSchedules(self) -> list[Document]: + """ Get all schedules from the database. + + Return: + List of *Documents*. May be empty. + """ + with self.lockSchedules: + return self.tabSchedules.all() + + + def getSchedule(self, ri:str) -> Optional[Document]: + """ Get a schedule from the database. + + Args: + ri: The resource ID of the schedule. + + Return: + The schedule, or *None* if not found. + """ + with self.lockSchedules: + return self.tabSchedules.get(doc_id = ri) # type:ignore[arg-type, return-value] + + + def searchSchedules(self, pi:str) -> list[Document]: + """ Search for schedules in the database. + + Args: + pi: The resource ID of the parent resource. + + Return: + List of *Documents*. May be empty. + """ + with self.lockSchedules: + return self.tabSchedules.search(self.schedulesQuery.pi == pi) + + + def upsertSchedule(self, ri:str, pi:str, schedule:list[str]) -> bool: + """ Add or update a schedule in the database. + + Args: + ri: The resource ID of the schedule. + pi: The resource ID of the schedule's parent resource. + schedule: The schedule to store. + + Return: + True if the schedule was added or updated, False otherwise. + """ + with self.lockSchedules: + return self.tabSchedules.upsert(Document( + { 'ri': ri, + 'pi': pi, + 'sce': schedule }, + ri)) is not None # type:ignore[arg-type] + + + def removeSchedule(self, ri:str) -> bool: + """ Remove a schedule from the database. + + Args: + ri: The resource ID of the schedule to remove. + + Return: + True if the schedule was removed, False otherwise. 
+ """ + with self.lockSchedules: + return len(self.tabSchedules.remove(doc_ids = [ri])) > 0 # type:ignore[arg-type, list-item] \ No newline at end of file diff --git a/acme/services/TextUI.py b/acme/services/TextUI.py index 1efbd45e..7c1aa93d 100644 --- a/acme/services/TextUI.py +++ b/acme/services/TextUI.py @@ -11,7 +11,7 @@ from __future__ import annotations -from typing import Optional, Any +from typing import Optional, Any, Literal import asyncio from . import CSE @@ -156,6 +156,10 @@ def refreshResources(self) -> None: def scriptPrint(self, scriptName:str, msg:str) -> None: """ Print a line to the script output. + + Args: + scriptName: Name of the script. + msg: Message to print. """ if self.tuiApp: self.tuiApp.scriptPrint(scriptName, msg) @@ -163,6 +167,10 @@ def scriptPrint(self, scriptName:str, msg:str) -> None: def scriptLog(self, scriptName:str, msg:str) -> None: """ Print a line to the script log output. + + Args: + scriptName: Name of the script. + msg: Message to print. """ if self.tuiApp: self.tuiApp.scriptLog(scriptName, msg) @@ -170,6 +178,10 @@ def scriptLog(self, scriptName:str, msg:str) -> None: def scriptLogError(self, scriptName:str, msg:str) -> None: """ Print a line to the script log output. + + Args: + scriptName: Name of the script. + msg: Message to print. """ if self.tuiApp: self.tuiApp.scriptLogError(scriptName, msg) @@ -177,13 +189,32 @@ def scriptLogError(self, scriptName:str, msg:str) -> None: def scriptClearConsole(self, scriptName:str) -> None: """ Clear the script console. + + Args: + scriptName: Name of the script. """ if self.tuiApp: self.tuiApp.scriptClearConsole(scriptName) + def scriptShowNotification(self, msg:str, title:str, severity:Literal['information', 'warning', 'error'], timeout:float) -> None: + """ Show a notification. + + Args: + msg: Message to show. + title: Title of the notification. + severity: Severity of the notification. + timeout: Timeout in seconds. + """ + if self.tuiApp: + self.tuiApp.scriptShowNotification(msg, title, severity, timeout) + + def scriptVisualBell(self, scriptName:str) -> None: """ Visual bell. + + Args: + scriptName: Name of the script. 
""" if self.tuiApp: self.tuiApp.scriptVisualBell(scriptName) \ No newline at end of file diff --git a/acme/services/Validator.py b/acme/services/Validator.py index 247aa740..aa971148 100644 --- a/acme/services/Validator.py +++ b/acme/services/Validator.py @@ -11,16 +11,16 @@ from typing import Any, Dict, Tuple, Optional from copy import deepcopy -import re +import re, json import isodate from ..etc.Types import AttributePolicy, ResourceAttributePolicyDict, AttributePolicyDict, BasicType, Cardinality from ..etc.Types import RequestOptionality, Announced, AttributePolicy, ResultContentType -from ..etc.Types import JSON, FlexContainerAttributes, FlexContainerSpecializations -from ..etc.Types import CSEType, ResourceTypes, Permission, Operation, NotificationContentType, NotificationEventType +from ..etc.Types import JSON, FlexContainerAttributes, FlexContainerSpecializations, GeometryType, GeoSpatialFunctionType +from ..etc.Types import CSEType, ResourceTypes, Permission, Operation from ..etc.ResponseStatusCodes import ResponseStatusCode, BAD_REQUEST, ResponseException, CONTENTS_UNACCEPTABLE from ..etc.Utils import pureResource, strToBool -from ..helpers.TextTools import findXPath +from ..helpers.TextTools import findXPath, soundsLike from ..etc.DateUtils import fromAbsRelTimestamp from ..helpers import TextTools from ..resources.Resource import Resource @@ -54,6 +54,9 @@ complexTypeAttributes:dict[str, list[str]] = {} # TODO doc +attributesComplexTypes:dict[str, list[str]] = {} +# TODO doc + # TODO make this more generic! _valueNameMappings = { @@ -61,8 +64,10 @@ 'bts': lambda v: BatteryStatus(int(v)).name, 'chty': lambda v: ResourceTypes.fullname(int(v)), 'cst': lambda v: CSEType(int(v)).name, - 'nct': lambda v: NotificationContentType(int(v)).name, - 'net': lambda v: NotificationEventType(int(v)).name, + #'nct': lambda v: NotificationContentType(int(v)).name, + #'net': lambda v: NotificationEventType(int(v)).name, + 'gmty': lambda v: GeometryType(int(v)).name, + 'gsf': lambda v: GeoSpatialFunctionType(int(v)).name, 'op': lambda v: Operation(int(v)).name, 'rcn': lambda v: ResultContentType(int(v)).name, 'rsc': lambda v: ResponseStatusCode(int(v)).name, @@ -277,7 +282,7 @@ def validateAttribute(self, attribute:str, 'rqi' : AttributePolicy(type = BasicType.string, cardinality =Cardinality.CAR1, optionalCreate = RequestOptionality.M, optionalUpdate = RequestOptionality.M, optionalDiscovery = RequestOptionality.O, announcement = Announced.NA, sname = 'rqi', lname = 'requestIdentifier', namespace = 'm2m', tpe = 'm2m:rqi'), 'pc' : AttributePolicy(type = BasicType.dict, cardinality =Cardinality.CAR01, optionalCreate = RequestOptionality.O, optionalUpdate = RequestOptionality.O, optionalDiscovery = RequestOptionality.O, announcement = Announced.NA, sname = 'pc', lname = 'primitiveContent', namespace = 'm2m', tpe = 'm2m:pc'), 'to' : AttributePolicy(type = BasicType.string, cardinality =Cardinality.CAR01, optionalCreate = RequestOptionality.O, optionalUpdate = RequestOptionality.O, optionalDiscovery = RequestOptionality.O, announcement = Announced.NA, sname = 'to', lname = 'to', namespace = 'm2m', tpe = 'm2m:to'), - 'fr' : AttributePolicy(type = BasicType.string, cardinality =Cardinality.CAR01, optionalCreate = RequestOptionality.O, optionalUpdate = RequestOptionality.O, optionalDiscovery = RequestOptionality.O, announcement = Announced.NA, sname = 'fr', lname = 'from', namespace = 'm2m', tpe = 'm2m:fr'), + 'fr' : AttributePolicy(type = BasicType.ID, cardinality =Cardinality.CAR01, optionalCreate = 
RequestOptionality.O, optionalUpdate = RequestOptionality.O, optionalDiscovery = RequestOptionality.O, announcement = Announced.NA, sname = 'fr', lname = 'from', namespace = 'm2m', tpe = 'm2m:fr'), 'ot' : AttributePolicy(type = BasicType.timestamp, cardinality =Cardinality.CAR01, optionalCreate = RequestOptionality.O, optionalUpdate = RequestOptionality.O, optionalDiscovery = RequestOptionality.O, announcement = Announced.NA, sname = 'ot', lname = 'originatingTimestamp', namespace = 'm2m', tpe = 'm2m:or'), 'rset' : AttributePolicy(type = BasicType.absRelTimestamp, cardinality =Cardinality.CAR01, optionalCreate = RequestOptionality.O, optionalUpdate = RequestOptionality.O, optionalDiscovery = RequestOptionality.O, announcement = Announced.NA, sname = 'rset', lname = 'resultExpirationTimestamp', namespace = 'm2m', tpe = 'm2m:rset'), 'ec' : AttributePolicy(type = BasicType.positiveInteger, cardinality =Cardinality.CAR01, optionalCreate = RequestOptionality.O, optionalUpdate = RequestOptionality.O, optionalDiscovery = RequestOptionality.O, announcement = Announced.NA, sname = 'ec', lname = 'eventCategory', namespace = 'm2m', tpe = 'm2m:ec'), @@ -319,10 +324,11 @@ def validatePrimitiveContent(self, pc:JSON) -> None: def validatePvs(self, dct:JSON) -> None: """ Validating special case for lists that are not allowed to be empty (pvs in ACP). """ - if (l :=len(dct['pvs'])) == 0: - raise BAD_REQUEST(L.logWarn('Attribute pvs must not be an empty list')) - elif l > 1: - raise BAD_REQUEST(L.logWarn('Attribute pvs must contain only one item')) + match len(dct['pvs']): + case 0: + raise BAD_REQUEST(L.logWarn('Attribute pvs must not be an empty list')) + case l if l > 1: + raise BAD_REQUEST(L.logWarn('Attribute pvs must contain only one item')) if not (acr := findXPath(dct, 'pvs/acr')): raise BAD_REQUEST(L.logWarn('Attribute pvs/acr not found')) if not isinstance(acr, list): @@ -357,6 +363,102 @@ def validateCSICB(self, val:str, name:str) -> None: # fall-through + def validateGeoPoint(self, geo:dict) -> bool: + """ Validate a GeoJSON point. A point is a list of two or three floats. + + Args: + geo: GeoJSON point. + + Return: + Boolean, indicating whether the point is valid. + """ + if not isinstance(geo, list) or not (2 <= len(geo) <= 3): + return False + for g in geo: + if not isinstance(g, float): + return False + return True + + + def validateGeoLinePolygon(self, geo:dict, isPolygon:Optional[bool] = False) -> bool: + """ Validate a GeoJSON line or polygon. + A line or polygon is a list of lists of two or three floats. + + Args: + geo: GeoJSON line or polygon. + isPolygon: Boolean, indicating whether the coordinates describe a polygon. + + Return: + Boolean, indicating whether the line or polygon is valid.
+ """ + if not isinstance(geo, list): + return False + + for g in geo: + if not isinstance(g, list) or len(g) < 2: + return False + if not self.validateGeoLinePolygon(g, isPolygon): + return False + return True + + + def validateGeoLocation(self, loc:dict) -> dict: + """ Validate a GeoJSON location. A location is a dictionary with a type and coordinates. + + Args: + loc: GeoJSON location. + + Return: + The validated location dictionary. + + Raises: + BAD_REQUEST: If the location definition is invalid. + """ + crd = json.loads(loc.get('crd')) # was validated before + match loc.get('typ'): + case GeometryType.Point: + if not self.validateGeoPoint(crd): + raise BAD_REQUEST(L.logWarn(f'Invalid GeoJSON point: {crd}')) + case GeometryType.LineString: + if not self.validateGeoLinePolygon(crd): + raise BAD_REQUEST(L.logWarn(f'Invalid GeoJSON LineString: {crd}')) + case GeometryType.Polygon: + if not self.validateGeoLinePolygon(crd, True): + raise BAD_REQUEST(L.logWarn(f'Invalid GeoJSON Polygon: {crd}')) + case GeometryType.MultiPoint: + for p in crd: + if not self.validateGeoPoint(p): + raise BAD_REQUEST(L.logWarn(f'Invalid GeoJSON MultiPoint: {crd}')) + case GeometryType.MultiLineString: + if not self.validateGeoMultiLinePolygon(crd): + raise BAD_REQUEST(L.logWarn(f'Invalid GeoJSON MultiLineString: {crd}')) + case GeometryType.MultiPolygon: + if not self.validateGeoMultiLinePolygon(crd, True): + raise BAD_REQUEST(L.logWarn(f'Invalid GeoJSON MultiPolygon: {crd}')) + return crd + + def isExtraResourceAttribute(self, attr:str, resource:Resource) -> bool: """ Check whether the resource attribute *attr* is neither a universal, common, or resource attribute, nor an internal attribute. @@ -487,9 +589,21 @@ def addAttributePolicy(self, rtype:ResourceTypes|str, attr:str, attrPolicy:Attri else: complexTypeAttributes[attrPolicy.ctype] = [ attr ] + if (ctypes := attributesComplexTypes.get(attr)): + ctypes.append(attrPolicy.ctype) + else: + attributesComplexTypes[attr] = [ attrPolicy.ctype ] + def getAttributePolicy(self, rtype:ResourceTypes|str, attr:str) -> AttributePolicy: """ Return the attributePolicy for a resource type. + + Args: + rtype: Resource type. + attr: Attribute name. + + Return: + AttributePolicy or None. """ # Search for the specific type first if (ap := attributePolicies.get((rtype, attr))): @@ -503,6 +617,47 @@ def getAttributePolicy(self, rtype:ResourceTypes|str, attr:str) -> AttributePoli return None + def getAttributePoliciesByName(self, attr:str) -> Optional[list[AttributePolicy]]: + """ Return the attribute policies for an attribute name. + + Args: + attr: Attribute name. + + Return: + List of AttributePolicy or None. 
+ """ + result = { } + keys = attributePolicies.keys() + _attrlower = attr.lower() + + # First search for the specific attribute name + for each in keys: + s = each[1] + if s == _attrlower: + result[s] = attributePolicies[each] + break + + # If it couldn't be found, search for similar full attribute names + if not result: + for each in keys: + s = each[1] + v = attributePolicies[each] + if soundsLike(_attrlower, v.lname, 99): + if s not in result: + result[s] = v + + # If it couldn't be found, search for parts of the attribute name + for each in keys: + s = each[1] + v = attributePolicies[each] + if _attrlower in v.lname.lower(): + if s not in result: + result[s] = v + + + return [ each for each in result.values() ] + + def getComplexTypeAttributePolicies(self, ctype:str) -> Optional[list[AttributePolicy]]: if (attrs := complexTypeAttributes.get(ctype)): return [ self.getAttributePolicy(ctype, attr) for attr in attrs ] @@ -532,29 +687,56 @@ def getShortnameLongNameMapping(self) -> dict[str, str]: return result - def getAttributeValueName(self, key:str, value:str) -> str: + def getAttributeValueName(self, attr:str, value:int, rtype:Optional[ResourceTypes] = None) -> str: """ Return the name of an attribute value. This is usually used for enumerations, where the value is a number and the name is a string. Args: - key: String, attribute name. - value: String, attribute value. + attr: Attribute name. + value: Attribute value. Return: String, name of the attribute value. """ try: - if key in _valueNameMappings: - return _valueNameMappings[key](value) # type: ignore [no-untyped-call] + if attr in _valueNameMappings: + return _valueNameMappings[attr](value) # type: ignore [no-untyped-call] + from ..services import CSE + return CSE.validator.getEnumInterpretation(rtype, attr, value) except Exception as e: return str(e) + + + def getEnumInterpretation(self, rtype: ResourceTypes, attr:str, value:int) -> str: + """ Return the interpretation of an enumeration. + + Args: + rtype: Resource type. May be None. + attr: Attribute name. + value: Enumeration value. + + Return: + String, interpretation of the enumeration, or the value itself if no interpretation is available. + """ + if rtype is not None: + if (policy := self.getAttributePolicy(rtype, attr)) and policy.evalues: + return policy.evalues.get(int(value), str(value)) + + if (ctype := attributesComplexTypes.get(attr)): + if (policy := self.getAttributePolicy(ctype[0], attr)) and policy.evalues: # just any policy for the complex type + return policy.evalues.get(int(value), str(value)) return '' - + return str(value) + # # Internals. # + _ncNameDisallowedChars = ( '!', '"', '#', '$', '%', '&', '\'', '(', ')', + '*', '+', ',', '/', ':', ';', '<', '=', '>', + '?', '@', '[', ']', '^', '´' , '`', '{', '|', '}', '~' ) + def _validateType(self, dataType:BasicType, value:Any, convert:Optional[bool] = False, @@ -568,7 +750,7 @@ def _validateType(self, dataType:BasicType, value and the method will attempt to convert the value to its target type; otherwise this is an error. Return: - Result. If the check is positive then Result.data is set to a tuple (the determined data type, the converted value). + Result. If the check is positive then a tuple is returned: (the determined data type, the converted value). 
""" # Ignore None values @@ -578,144 +760,164 @@ def _validateType(self, dataType:BasicType, # convert some types if necessary if convert: - if dataType in ( BasicType.positiveInteger, - BasicType.nonNegInteger, - BasicType.unsignedInt, - BasicType.unsignedLong, - BasicType.integer, - BasicType.enum ) and isinstance(value, str): - try: - value = int(value) - except Exception as e: - raise BAD_REQUEST(str(e)) - elif dataType == BasicType.boolean and isinstance(value, str): # "true"/"false" - try: - value = strToBool(value) - except Exception as e: - raise BAD_REQUEST(str(e)) - elif dataType == BasicType.float and isinstance(value, str): + if isinstance(value, str): try: - value = float(value) + match dataType: + case BasicType.positiveInteger |\ + BasicType.nonNegInteger |\ + BasicType.unsignedInt |\ + BasicType.unsignedLong |\ + BasicType.integer |\ + BasicType.enum: + value = int(value) + + case BasicType.boolean: + value = strToBool(value) + + case BasicType.float: + value = float(value) + except Exception as e: raise BAD_REQUEST(str(e)) # Check types and values - if dataType == BasicType.positiveInteger: - if isinstance(value, int): - if value > 0: + match dataType: + case BasicType.positiveInteger: + if isinstance(value, int) and value > 0: return (dataType, value) - raise BAD_REQUEST('value must be > 0') - raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: positive integer') - - if dataType == BasicType.enum: - if isinstance(value, int): - if policy is not None and len(policy.evalues) and value not in policy.evalues: - raise BAD_REQUEST('undefined enum value') - return (dataType, value) - raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: positive integer') + raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: positive integer') - if dataType == BasicType.nonNegInteger: - if isinstance(value, int): - if value >= 0: + case BasicType.enum: + if isinstance(value, int): + if policy is not None and len(policy.evalues) and value not in policy.evalues: + raise BAD_REQUEST('undefined enum value') + return (dataType, value) + raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: integer') + + case BasicType.nonNegInteger: + if isinstance(value, int) and value >= 0: + return (dataType, value) + raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: non-negative integer') + + case BasicType.unsignedInt | BasicType.unsignedLong: + if isinstance(value, int): return (dataType, value) - raise BAD_REQUEST('value must be >= 0') - raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: non-negative integer') + raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: unsigned integer') - if dataType in ( BasicType.unsignedInt, BasicType.unsignedLong ): - if isinstance(value, int): + case BasicType.timestamp if isinstance(value, str): + if fromAbsRelTimestamp(value) == 0.0: + raise BAD_REQUEST(f'format error in timestamp: {value}') + return (dataType, value) + + case BasicType.absRelTimestamp: + match value: + case str(): + try: + int(value) + # fallthrough + except Exception as e: # could happen if this is a string with an iso timestamp. 
Then try next test + if fromAbsRelTimestamp(value) == 0.0: + raise BAD_REQUEST(f'format error in absRelTimestamp: {value}') + # fallthrough + case int(): + pass + # fallthrough + case _: + raise BAD_REQUEST(f'unsupported data type for absRelTimestamp') + return (dataType, value) # int/long is ok + + case BasicType.string | BasicType.anyURI if isinstance(value, str): return (dataType, value) - raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: unsigned integer') - if dataType == BasicType.timestamp and isinstance(value, str): - if fromAbsRelTimestamp(value) == 0.0: - raise BAD_REQUEST(f'format error in timestamp: {value}') - return (dataType, value) + case BasicType.ID if isinstance(value, str): # TODO check for valid resourceID + return (dataType, value) + + case BasicType.ncname if isinstance(value, str): + if len(value) == 0 or value[0].isdigit() or value[0] in ('-', '.'): + raise BAD_REQUEST(f'invalid NCName: {value} (must not start with a digit, "-", or ".")') + for v in value: + if v.isspace(): + raise BAD_REQUEST(f'invalid NCName: {value} (must not contain whitespace)') + if v in self._ncNameDisallowedChars: + raise BAD_REQUEST(f'invalid NCName: {value} (must not contain any of {",".join(self._ncNameDisallowedChars)})') + return (dataType, value) - if dataType == BasicType.absRelTimestamp: - if isinstance(value, str): - try: - rel = int(value) - # fallthrough - except Exception as e: # could happen if this is a string with an iso timestamp. Then try next test - if fromAbsRelTimestamp(value) == 0.0: - raise BAD_REQUEST(f'format error in absRelTimestamp: {value}') - # fallthrough - elif not isinstance(value, int): - raise BAD_REQUEST(f'unsupported data type for absRelTimestamp') - return (dataType, value) # int/long is ok - - if dataType in ( BasicType.string, BasicType.anyURI ) and isinstance(value, str): - return (dataType, value) + case BasicType.list | BasicType.listNE if isinstance(value, list): + if dataType == BasicType.listNE and len(value) == 0: + raise BAD_REQUEST('empty list is not allowed') + if policy is not None and policy.ltype is not None: + for each in value: + self._validateType(policy.ltype, each, convert = convert, policy = policy) + return (dataType, value) - if dataType in ( BasicType.list, BasicType.listNE ) and isinstance(value, list): - if dataType == BasicType.listNE and len(value) == 0: - raise BAD_REQUEST('empty list is not allowed') - if policy is not None and policy.ltype is not None: - for each in value: - self._validateType(policy.ltype, each, convert = convert, policy = policy) - return (dataType, value) + case BasicType.complex: + # Check complex types + if not policy: + raise BAD_REQUEST(L.logErr(f'internal error: policy is missing for validation of complex attribute')) + + if isinstance(value, dict): + typeName = policy.lTypeName if policy.type == BasicType.list else policy.typeName; + for k, v in value.items(): + if not (p := self.getAttributePolicy(typeName, k)): + raise BAD_REQUEST(f'unknown or undefined attribute:{k} in complex type: {typeName}') + # recursively validate a dictionary attribute + self._validateType(p.type, v, convert = convert, policy = p) + + # Check that all mandatory attributes are present + attributeNames = value.keys() + for ap in self.getComplexTypeAttributePolicies(typeName): + if Cardinality.isMandatory(ap.cardinality) and ap.sname not in attributeNames: + raise BAD_REQUEST(f'attribute is mandatory for complex type : {typeName}.{ap.sname}') + return (dataType, value) + raise BAD_REQUEST(f'Expected complex 
type, found: {value}') - if dataType == BasicType.dict and isinstance(value, dict): - return (dataType, value) - - if dataType == BasicType.boolean: - if isinstance(value, bool): + case BasicType.dict if isinstance(value, dict): return (dataType, value) - raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: bool') - if dataType == BasicType.float: - if isinstance(value, (float, int)): - return (dataType, value) - raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: float') + case BasicType.boolean: + if isinstance(value, bool): + return (dataType, value) + raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: bool') + + case BasicType.integer: + if isinstance(value, int): + return (dataType, value) + raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: integer') + + case BasicType.float: + if isinstance(value, (float, int)): + return (dataType, value) + raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: float') + + case BasicType.geoJsonCoordinate if isinstance(value, str): + try: + geo = json.loads(value) + except Exception as e: + raise BAD_REQUEST(f'Invalid geoJsonCoordinate: {str(e)}') + if self.validateGeoPoint(geo) or self.validateGeoLinePolygon(geo) or self.validateGeoMultiLinePolygon(geo): + return (dataType, geo) + raise BAD_REQUEST(f'Invalid geoJsonCoordinate: {value}') - if dataType == BasicType.integer: - if isinstance(value, int): + case BasicType.duration: + try: + isodate.parse_duration(value) + except Exception as e: + raise BAD_REQUEST(f'must be an ISO duration (e.g. "PT2S"): {str(e)}') return (dataType, value) - raise BAD_REQUEST(f'invalid type: {type(value).__name__}. Expected: integer') - if dataType == BasicType.geoCoordinates and isinstance(value, dict): - return (dataType, value) - - if dataType == BasicType.duration: - try: - isodate.parse_duration(value) - except Exception as e: - raise BAD_REQUEST(f'must be an ISO duration: {str(e)}') - return (dataType, value) - - if dataType == BasicType.base64: - if not TextTools.isBase64(value): - raise BAD_REQUEST(f'value is not base64-encoded') - return (dataType, value) - - if dataType == BasicType.schedule: - if isinstance(value, str) and re.match(self._scheduleRegex, value): + case BasicType.base64: + if not TextTools.isBase64(value): + raise BAD_REQUEST(f'value is not base64-encoded') return (dataType, value) - raise BAD_REQUEST(f'invalid type: {type(value).__name__} or pattern {value}. 
Expected: cron-like schedule') - if dataType == BasicType.any: - return (dataType, value) - - if dataType == BasicType.complex: - if not policy: - raise BAD_REQUEST(L.logErr(f'internal error: policy is missing for validation of complex attribute')) - - if isinstance(value, dict): - typeName = policy.lTypeName if policy.type == BasicType.list else policy.typeName; - for k, v in value.items(): - if not (p := self.getAttributePolicy(typeName, k)): - raise BAD_REQUEST(f'unknown or undefined attribute:{k} in complex type: {typeName}') - # recursively validate a dictionary attribute - self._validateType(p.type, v, convert = convert, policy = p) - - # Check that all mandatory attributes are present - attributeNames = value.keys() - for ap in self.getComplexTypeAttributePolicies(typeName): - if Cardinality.isMandatory(ap.cardinality) and ap.sname not in attributeNames: - raise BAD_REQUEST(f'attribute is mandatory for complex type : {typeName}.{ap.sname}') + case BasicType.schedule: + if isinstance(value, str) and re.match(self._scheduleRegex, value): + return (dataType, value) + raise BAD_REQUEST(f'invalid type: {type(value).__name__} or pattern {value}. Expected: cron-like schedule') + + case BasicType.any: return (dataType, value) - raise BAD_REQUEST(f'Expected complex type, found: {value}') raise BAD_REQUEST(f'type mismatch or unknown; expected type: {str(dataType)}, value type: {type(value).__name__}') diff --git a/acme/textui/ACMEContainerDelete.py b/acme/textui/ACMEContainerDelete.py index 14e28304..aa45de88 100644 --- a/acme/textui/ACMEContainerDelete.py +++ b/acme/textui/ACMEContainerDelete.py @@ -1,4 +1,4 @@ - # +# # ACMEContainerDelete.py # # (c) 2023 by Andreas Kraft @@ -49,7 +49,7 @@ class ACMEContainerDelete(Container): width: 1fr; display: block; overflow: auto; - height: 3; + height: 1; content-align: center middle; background: $panel; } @@ -61,6 +61,8 @@ class ACMEContainerDelete(Container): """ def __init__(self) -> None: + """ Initialize the view. + """ super().__init__(id = idRequestDelete) self.requestOriginator = 'CAdmin' self.response = Static('', id = 'request-delete-response-response') @@ -70,6 +72,11 @@ def __init__(self) -> None: def compose(self) -> ComposeResult: + """ Compose the view. + + Returns: + The ComposeResult + """ with Vertical(id = 'request-delete-view'): yield self.fieldOriginator with Center(): @@ -85,7 +92,10 @@ def on_show(self) -> None: def updateResource(self, resource:Resource) -> None: self.requestOriginator = resource.getOriginator() - self.fieldOriginator.update(self.requestOriginator, [CSE.cseOriginator, self.requestOriginator]) + if self.requestOriginator: + self.fieldOriginator.update(self.requestOriginator, [CSE.cseOriginator, self.requestOriginator]) + else: # No originator, use CSE originator + self.fieldOriginator.update(CSE.cseOriginator, [CSE.cseOriginator]) self.resource = resource self.response.update('') @@ -112,3 +122,6 @@ def buttonExecute(self) -> None: self.response.update(f'Response Status: {e.rsc}\n\n[red]{e.dbg}[/red]') + @on(ACMEFieldOriginator.Submitted) + def inputFieldSubmitted(self, value:str) -> None: + self.buttonExecute() diff --git a/acme/textui/ACMEContainerDiagram.py b/acme/textui/ACMEContainerDiagram.py new file mode 100644 index 00000000..bcbbb6ff --- /dev/null +++ b/acme/textui/ACMEContainerDiagram.py @@ -0,0 +1,268 @@ +# +# ACMEContainerDiagram.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. See the LICENSE file for further details. 
+# +""" This module defines the Diagram view for for *Container* resources for the ACME text UI. +""" +from __future__ import annotations +from typing import Optional, Callable +from enum import IntEnum + +from textual.app import ComposeResult +from textual import on +from textual.containers import Container, Vertical, Center, Horizontal, Middle +from textual.binding import Binding +from textual.widgets import Button, RadioSet, RadioButton +from textual_plotext import PlotextPlot + +from ..etc.DateUtils import fromISO8601Date +from ..services import CSE + + +class DiagramTypes(IntEnum): + """ Enumeration of the different diagram types. + """ + Line = 0 + Graph = 1 + Scatter = 2 + Bar = 3 + Timeline = 4 + + +class ACMEContainerDiagram(Container): + + DEFAULT_CSS = ''' +#diagram-view { + height: 100%; + padding: 0 1 1 1; +} + +#diagram-plot { + /*height: 100%;*/ +} + +#diagram-footer { + width: 100%; + margin-top: 1; + height: 1; +} + +#diagram-button-set { + width: auto; + margin-bottom: 0; +} + +#diagram-line-button { + height: 1; + border: none; + margin-right: 1; + min-width: 10; +} + +#diagram-graph-button { + height: 1; + border: none; + margin-right: 1; + min-width: 11; +} + +#diagram-scatter-button { + height: 1; + border: none; + margin-right: 1; + min-width: 13; +} + +#diagram-bar-button { + height: 1; + border: none; + margin-right: 1; + min-width: 9; +} + +#diagram-timeline-button { + height: 1; + border: none; + margin-right: 0; + min-width: 14; +} + +#diagram-refresh-button { + height: 1; + border: none; + margin-left: 4; + margin-right: 0; + min-width: 13; +} +''' + +# TODO perhaps replace the PlotextPlot instance every time one chooses another diagram type + + def __init__(self, refreshCallback:Callable) -> None: + super().__init__() + self.color = (0, 120, 212) + self.type = DiagramTypes.Line + self.plotContainer:Container = None + self.plot:PlotextPlot = None + self.values:list[float] = [] + self.dates:Optional[list[str]] = [] + self.refreshCallback = refreshCallback + self.buttons = { + DiagramTypes.Line: Button('Line', variant = 'success', id = 'diagram-line-button'), + DiagramTypes.Graph: Button('Graph', variant = 'primary', id = 'diagram-graph-button'), + DiagramTypes.Scatter: Button('Scatter', variant = 'primary', id = 'diagram-scatter-button'), + DiagramTypes.Bar: Button('Bar', variant = 'primary', id = 'diagram-bar-button'), + DiagramTypes.Timeline: Button('Timeline', variant = 'primary', id = 'diagram-timeline-button'), + } + + + + def compose(self) -> ComposeResult: + self._newPlot() + with Vertical(id = 'diagram-view'): + yield self.plotContainer + with Center(id = 'diagram-footer'): + with Horizontal(id = 'diagram-button-set'): + for button in self.buttons.values(): + yield button + yield Button('Refresh', variant = 'primary', id = 'diagram-refresh-button') + + + def on_show(self) -> None: + self._activateButton(self.type) + self.plotGraph() + + + @on(Button.Pressed, '#diagram-line-button') + def lineButtonExecute(self) -> None: + """ Callback to switch to the line diagram. + """ + self._activateButton(DiagramTypes.Line) + + + @on(Button.Pressed, '#diagram-graph-button') + def graphButtonExecute(self) -> None: + """ Callback to switch to the graph diagram. + """ + self._activateButton(DiagramTypes.Graph) + + + @on(Button.Pressed, '#diagram-scatter-button') + def scatterButtonExecute(self) -> None: + """ Callback to switch to the scatter diagram. 
+ """ + self._activateButton(DiagramTypes.Scatter) + + + @on(Button.Pressed, '#diagram-bar-button') + def barButtonExecute(self) -> None: + """ Callback to switch to the bar diagram. + """ + self._activateButton(DiagramTypes.Bar) + + + @on(Button.Pressed, '#diagram-timeline-button') + def timeLineButtonExecute(self) -> None: + """ Callback to switch to the timeline diagram. + """ + self._activateButton(DiagramTypes.Timeline) + + + @on(Button.Pressed, '#diagram-refresh-button') + def refreshButtonExecute(self) -> None: + """ Callback to refresh the diagram. + """ + if self.refreshCallback: + self.refreshCallback() + self.plotGraph() + + + def plotGraph(self) -> None: + """ Plot the graph. + """ + dates = [ fromISO8601Date(d).strftime('%d/%m/%Y %H:%M:%S') for d in self.dates ] if self.dates else None + values = self.values + + # plt.clear_data() + self._newPlot() + + plt = self.plot.plt + match self.type: + case DiagramTypes.Line: + if dates is None: + plt.plot(values, color = self.color) + else: + plt.plot(dates, values, color = self.color) + case DiagramTypes.Graph: + if dates is None: + plt.plot(values, color = self.color, fillx=True) + else: + plt.plot(dates, values, color = self.color, fillx=True) + case DiagramTypes.Scatter: + if dates is None: + plt.scatter(values, color = self.color) + else: + plt.scatter(dates, values, color = self.color) + case DiagramTypes.Bar: + if dates is None: + plt.bar(values, color = self.color) + else: + plt.bar(dates, values, color = self.color) + case DiagramTypes.Timeline: + _d = [ fromISO8601Date(d).strftime('%d/%m/%Y %H:%M:%S') for d in self.dates ] + if dates is None: + plt.event_plot(_d, color = self.color) + else: + plt.event_plot(dates, _d, color = self.color) # type: ignore[arg-type] + self.plot.refresh(layout = True) + + + def setData(self, values:list[float], dates:Optional[list[str]] = None) -> None: + """ Set the data to be displayed in the diagram. + + Args: + values: The data to be displayed. + dates: The dates for the data. If not given, the current time is used. + """ + self.values = values + self.dates = dates + + + ################################################################# + # + # Private + # + + def _newPlot(self) -> None: + """ Create a new plot instance and update the container. + """ + + # Remove a previous plot if there is one + if not self.plotContainer: + self.plotContainer = Container() + else: + self.plot.remove() + + # Create a new plot and configure its timestamp format + self.plot = PlotextPlot() + self.plot.plt.date_form('d/m/Y H:M:S', 'Y-m-d H:M:S') + + # Add the plot to the container and refresh the container + self.plotContainer._add_child(self.plot) + self.plotContainer.refresh(layout=True) + + + def _activateButton(self, type:DiagramTypes) -> None: + """ Activate a button. + + Args: + type: The button to activate. 
+ """ + if self.type != type: + self.type = type + for b in self.buttons.values(): + b.variant = 'primary' + self.buttons[type].variant = 'success' + self.plotGraph() diff --git a/acme/textui/ACMEContainerRequests.py b/acme/textui/ACMEContainerRequests.py index 3f8b95bd..0734c1fc 100644 --- a/acme/textui/ACMEContainerRequests.py +++ b/acme/textui/ACMEContainerRequests.py @@ -61,7 +61,9 @@ class ACMEViewRequests(Vertical): BINDINGS = [ Binding('r', 'refresh_requests', 'Refresh'), Binding('D', 'delete_requests', 'Delete ALL Requests', key_display = 'SHIFT+D'), - Binding('e', 'enable_requests', '') + Binding('e', 'enable_requests', ''), + Binding('t', 'toggle_list_details', 'List Details'), + Binding('ctrl+t', 'toggle_comment_style', 'Comments Style'), ] DEFAULT_CSS = """ @@ -82,7 +84,7 @@ class ACMEViewRequests(Vertical): #request-list-header { /* overflow: auto hidden; */ width: 1fr; - height: 3; + height: 1; align-vertical: middle; background: $panel; } @@ -95,7 +97,7 @@ class ACMEViewRequests(Vertical): #request-list-details-header { overflow: auto; - height: 3; + height: 1; align-vertical: middle; background: $panel; } @@ -132,10 +134,12 @@ def __init__(self) -> None: # Request List self.requestList = ListView(id = 'request-list-list') + self.listDetails = False # Request view: request + response self.requestListRequest = Static(id = 'request-list-request') self.requestListResponse = Static(id = 'request-list-response') + self.commentsOneLine = True @property @@ -197,19 +201,26 @@ def _showRequests(self, item:ACMEListItem) -> None: """ # Get the request's json jsns = commentJson(self._currentRequests[cast(ACMEListItem, item)._data]['req'], - explanations = self.app.attributeExplanations, # type: ignore [attr-defined] - getAttributeValueName = CSE.validator.getAttributeValueName, # type: ignore [attr-defined] - width = self.requestListRequest.size[0] - 2) # type: ignore [attr-defined] + explanations = self.app.attributeExplanations, # type: ignore [attr-defined] + getAttributeValueName = CSE.validator.getAttributeValueName, # type: ignore [attr-defined] + width = None if self.commentsOneLine else self.requestListRequest.size[0] - 2) # type: ignore [attr-defined] + _l1 = jsns.count('\n') # Add syntax highlighting and explanations, and add to the view self.requestListRequest.update(Syntax(jsns, 'json', theme = self.app.syntaxTheme)) # type: ignore [attr-defined] # Get the response's json jsns = commentJson(self._currentRequests[cast(ACMEListItem, item)._data]['rsp'], - explanations = self.app.attributeExplanations, # type: ignore [attr-defined] - getAttributeValueName = CSE.validator.getAttributeValueName, # type: ignore [attr-defined] - width = self.requestListRequest.size[0] - 2) # type: ignore [attr-defined] - + explanations = self.app.attributeExplanations, # type: ignore [attr-defined] + getAttributeValueName = CSE.validator.getAttributeValueName, # type: ignore [attr-defined] + width = None if self.commentsOneLine else self.requestListRequest.size[0] - 2) # type: ignore [attr-defined] + _l2 = jsns.count('\n') + + # Make sure the response has the same number of lines as the request + # (This is a hack to make sure the separator line covers the entire height of the view) + if _l1 > _l2: + jsns += '\n' * (_l1 - _l2) + # Add syntax highlighting and explanations, and add to the view self.requestListResponse.update(Syntax(jsns, 'json', theme = self.app.syntaxTheme)) # type: ignore [attr-defined] @@ -230,6 +241,16 @@ def action_enable_requests(self) -> None: def action_disable_requests(self) 
-> None: CSE.request.enableRequestRecording = False self.updateBindings() + + + def action_toggle_list_details(self) -> None: + self.listDetails = not self.listDetails + self.updateRequests() + + + def action_toggle_comment_style(self) -> None: + self.commentsOneLine = not self.commentsOneLine + self.updateRequests() def updateBindings(self) -> None: @@ -271,11 +292,18 @@ def rscFmt(rsc:int) -> str: # _to = _to if _to else '' _srn = r.get('srn', '') # _srn = _srn if _srn else '' - self.requestList.append(_l := ACMEListItem( - Label(f' {i:4} - {_ts[1]} {Operation(r["op"]).name:10.10} {str(r.get("org", "")):30.30} {str(_to):30.30} {rscFmt(r["rsc"])}\n [dim]{_ts[0]}[/dim] [dim]{_srn}[/dim]'))) + match self.listDetails: + case True: + _l = ACMEListItem(Label(f' {i:4} - {_ts[1]} {Operation(r["op"]).name:10.10} {str(r.get("org", "")):30.30} {str(_to):30.30} {rscFmt(r["rsc"])}\n [dim]{_ts[0]}[/dim] [dim]{_srn}[/dim]')) + case False: + _l = ACMEListItem(Label(f' {i:4} - {_ts[1]} {Operation(r["op"]).name:10.10} {str(r.get("org", "")):30.30} {str(_to):30.30} {rscFmt(r["rsc"])}')) + _l._data = i if r['out']: _l.set_class(True, '--outgoing') + self.requestList.append(_l) + # self.requestList.append(_l := ACMEListItem( + # Label(f' {i:4} - {_ts[1]} {Operation(r["op"]).name:10.10} {str(r.get("org", "")):30.30} {str(_to):30.30} {rscFmt(r["rsc"])}\n [dim]{_ts[0]}[/dim] [dim]{_srn}[/dim]'))) def deleteRequests(self) -> None: diff --git a/acme/textui/ACMEContainerTools.py b/acme/textui/ACMEContainerTools.py index 5e4ef9bb..3159e2e5 100644 --- a/acme/textui/ACMEContainerTools.py +++ b/acme/textui/ACMEContainerTools.py @@ -14,13 +14,14 @@ from textual.app import ComposeResult from textual.binding import Binding from textual.containers import Container, Vertical, Center, Middle -from textual.widgets import Button, Tree as TextualTree, Markdown, TextLog +from textual.widgets import Button, Tree as TextualTree, Markdown, RichLog, Label from textual.widgets.tree import TreeNode from ..services import CSE from ..services.ScriptManager import PContext from ..helpers.ResourceSemaphore import CriticalSection from ..helpers.BackgroundWorker import BackgroundWorkerPool, BackgroundWorker from ..helpers.Interpreter import SSymbol +from ..textui.ACMEFieldOriginator import ACMEInputField # TODO Add editing of configuration values @@ -95,11 +96,14 @@ def _showTool(self, node:TreeNode) -> None: # Stop a currently running autorun worker when the node is different # from the previous autorun node self.stopAutoRunScript(str(node.label)) + self.parentContainer.toolsInput.value = '' + if node.children: # This is a category node, so set the description, clear the button etc. 
self.parentContainer.toolsHeader.update(f'## {node.label}\n{CSE.script.categoryDescriptions.get(str(node.label), "")}') self.parentContainer.toolsExecButton.styles.visibility = 'hidden' + self.parentContainer.toolsInput.styles.visibility = 'hidden' self.parentContainer.toolsLog.clear() @@ -118,6 +122,13 @@ def _showTool(self, node:TreeNode) -> None: {description} """) + # Add input field if the meta tag "tuiInput" is set + if (_l := ctx.getMeta('tuiInput')): + self.parentContainer.toolsInput.styles.visibility = 'visible' + self.parentContainer.toolsInput.setLabel(_l) + else: + self.parentContainer.toolsInput.styles.visibility = 'hidden' + # configure the button according to the meta tag "tuiExecuteButton" self.parentContainer.toolsExecButton.styles.visibility = 'visible' self.parentContainer.toolsExecButton.label = 'Execute' @@ -155,6 +166,7 @@ def _showTool(self, node:TreeNode) -> None: else: self.parentContainer.toolsHeader.update('') self.parentContainer.toolsExecButton.styles.visibility = 'hidden' + self.parentContainer.toolsInput.styles.visibility = 'hidden' def printLogs(self) -> None: @@ -216,7 +228,7 @@ class ACMEContainerTools(Container): display: block; overflow: auto auto; min-width: 100%; - height: 1fr; + height: 1.5fr; margin: 0 0 0 0; } @@ -259,11 +271,14 @@ def __init__(self, tuiApp:ACMETuiApp.ACMETuiApp) -> None: self.toolsTree = ACMEToolsTree('Tools & Commands', id = 'tree-view') self.toolsTree.parentContainer = self - - self.toolsExecButton = Button('Execute', id = 'tool-execute', variant = 'primary') + + self.toolsInput = ACMEInputField(id = 'tools-argument') + self.toolsInput.styles.visibility = 'hidden' + + self.toolsExecButton = Button('Execute', id = 'tool-execute-button', variant = 'primary') self.toolsExecButton.styles.visibility = 'hidden' - self.toolsLog = TextLog(id = 'tools-log-view', markup=True) + self.toolsLog = RichLog(id = 'tools-log-view', markup=True) def compose(self) -> ComposeResult: @@ -273,8 +288,11 @@ def compose(self) -> ComposeResult: with Center(id = 'tools-top-view'): yield self.toolsHeader with Middle(id = 'tools-arguments-view'): + with Center(): + yield self.toolsInput with Center(): yield self.toolsExecButton + yield self.toolsLog @@ -287,11 +305,16 @@ def leaving_tab(self) -> None: self.toolsTree.stopAutoRunScript() - @on(Button.Pressed, '#tool-execute') + @on(Button.Pressed, '#tool-execute-button') def buttonExecute(self) -> None: - _executeScript(str(self.toolsTree.cursor_node.label)) + _executeScript(str(self.toolsTree.cursor_node.label), argument = str(self.toolsInput.value)) + @on(ACMEInputField.Submitted) + def inputFieldSubmitted(self) -> None: + self.buttonExecute() + + def action_clear_log(self) -> None: # Clear the log view self.toolsLog.clear() @@ -410,7 +433,7 @@ def _getContext(name:str) -> Optional[PContext]: return None -def _executeScript(name:str, autoRun:Optional[bool] = False) -> bool: +def _executeScript(name:str, autoRun:Optional[bool] = False, argument:Optional[str] = '') -> bool: """ Executes the given script context. 
Args: @@ -418,6 +441,7 @@ def _executeScript(name:str, autoRun:Optional[bool] = False) -> bool: """ if (ctx := _getContext(str(name))) and not ctx.state.isRunningState(): return CSE.script.runScript(ctx, + arguments = argument, background = True, environment = { 'tui.autorun': SSymbol(boolean = autoRun), } diff --git a/acme/textui/ACMEContainerTree.py b/acme/textui/ACMEContainerTree.py index 784af776..73929921 100644 --- a/acme/textui/ACMEContainerTree.py +++ b/acme/textui/ACMEContainerTree.py @@ -8,6 +8,7 @@ """ from __future__ import annotations from typing import List, Tuple, Optional +from datetime import datetime from textual import events from textual.app import ComposeResult from textual.widgets import Tree as TextualTree, Static, TabbedContent, TabPane, Markdown, Label, Button @@ -20,8 +21,10 @@ from ..textui.ACMEContainerRequests import ACMEViewRequests from ..etc.ResponseStatusCodes import ResponseException from ..etc.Types import ResourceTypes +from ..etc.DateUtils import fromAbsRelTimestamp from ..helpers.TextTools import commentJson from .ACMEContainerDelete import ACMEContainerDelete +from .ACMEContainerDiagram import ACMEContainerDiagram idTree = 'tree' @@ -87,13 +90,14 @@ def _update_content(self, ri:str) -> None: self.parentContainer.header.update(f'## {ResourceTypes.fullname(resource.ty)}' if resource else '##  ') + def _retrieve_resource_children(self, ri:str) -> List[Tuple[Resource, bool]]: result:List[Tuple[Resource, bool]] = [] - chs = [ x for x in CSE.dispatcher.directChildResources(ri) if not x.ty in [ ResourceTypes.GRP_FOPT, ResourceTypes.PCH_PCU ]] + chs = [ x for x in CSE.dispatcher.retrieveDirectChildResources(ri) if not x.ty in [ ResourceTypes.GRP_FOPT, ResourceTypes.PCH_PCU ]] # chs = [ x for x in CSE.dispatcher.directChildResources(ri) if not x.isVirtual() ] # chs = [ x for x in CSE.dispatcher.directChildResources(ri) if not x.isVirtual() ] for r in chs: - result.append((r, len([ x for x in CSE.dispatcher.directChildResources(r.ri) ]) > 0)) + result.append((r, len([ x for x in CSE.dispatcher.retrieveDirectChildResources(r.ri) ]) > 0)) # result.append((r, len([ x for x in CSE.dispatcher.directChildResources(r.ri) if not x.isVirtual() ]) > 0)) return result @@ -133,6 +137,8 @@ class ACMEContainerTree(Container): /* TODO try to get padding working with later released of textualize */ } + + ''' def __init__(self) -> None: @@ -145,13 +151,16 @@ def __init__(self) -> None: # Tabs self.tabs = TabbedContent() - # Resource and Request views + # Various Resource and Request views self.deleteView = ACMEContainerDelete() + self.diagram = ACMEContainerDiagram(refreshCallback = lambda: self.updateResource()) # For some reason, the markdown header is not refreshed the very first time self.header = Markdown('') self.resourceView = Static(id = 'resource-view', expand = True) self.requestView = ACMEViewRequests() + self.commentsOneLine = True + def compose(self) -> ComposeResult: @@ -166,6 +175,9 @@ def compose(self) -> ComposeResult: with TabPane('Requests', id = 'tree-tab-requests'): yield self.requestView + with TabPane('Diagram', id = 'tree-tab-diagram'): + yield self.diagram + # with TabPane('CREATE', id = 'tree-tab-create', disabled = True): # yield Markdown('## Send CREATE Request') # yield Label('TODO') @@ -175,6 +187,7 @@ def compose(self) -> ComposeResult: # with TabPane('UPDATE', id = 'tree-tab-update', disabled = True): # yield Markdown('## Send UPDATE Request') # yield Label('TODO') + with TabPane('DELETE', id = 'tree-tab-delete'): yield Markdown('## Send DELETE 
Request') yield self.deleteView @@ -206,14 +219,19 @@ def update(self) -> None: def updateResource(self, resource:Optional[Resource] = None) -> None: - self.resource = resource + if resource: + # Store the resource for later + self.resource = resource + else: + # Otherwise use the old / current resource + resource = self.resource # Add attribute explanations if resource: jsns = commentJson(resource.asDict(sort = True), explanations = self.app.attributeExplanations, # type: ignore [attr-defined] - getAttributeValueName = CSE.validator.getAttributeValueName, # type: ignore [attr-defined] - width = (self.resourceView.size[0] - 2) if self.resourceView.size[0] > 0 else 9999) # type: ignore [attr-defined] + getAttributeValueName = lambda a, v: CSE.validator.getAttributeValueName(a, v, resource.ty if resource else None), # type: ignore [attr-defined] + width = None if self.commentsOneLine else self.requestListRequest.size[0] - 2) # type: ignore [attr-defined] # Update the requests view self._update_requests(resource.ri) @@ -222,6 +240,31 @@ def updateResource(self, resource:Optional[Resource] = None) -> None: self.deleteView.updateResource(resource) self.deleteView.disabled = False + # Update Diagram view + try: + if resource.ty in (ResourceTypes.CNT, ResourceTypes.TS): + instances = CSE.dispatcher.retrieveDirectChildResources(resource.ri, [ResourceTypes.CIN, ResourceTypes.TSI]) + + # The following line may fail if the content cannot be converted to a float. + # This is expected! This just means that any content is not a number and we cannot raw a diagram. + # The exception is caught below and the diagram view is hidden. + values = [float(r.con) for r in instances] + + dates = [r.ct for r in instances] + # values = [float(r.con) + # for r in instances + # if r.ty in (ResourceTypes.CIN, ResourceTypes.TSI)] + # dates = [r.ct + # for r in instances + # if r.ty in (ResourceTypes.CIN, ResourceTypes.TSI)] + + self.diagram.setData(values, dates) + self.tabs.show_tab('tree-tab-diagram') + else: + self.tabs.hide_tab('tree-tab-diagram') + except: + self.tabs.hide_tab('tree-tab-diagram') + else: jsns = '' @@ -243,11 +286,14 @@ async def on_tabbed_content_tab_activated(self, event:TabbedContent.TabActivated """Handle TabActivated message sent by Tabs.""" # self.app.debugConsole.update(event.tab.id) - if self.tabs.active == 'tree-tab-requests': - self._update_requests() - self.requestView.updateBindings() - elif self.tabs.active == 'tree-tab-delete': - pass + match self.tabs.active: + case 'tree-tab-requests': + self._update_requests() + self.requestView.updateBindings() + case 'tree-tab-resource': + pass + case 'tree-tab-delete': + pass self.app.updateFooter() # type:ignore[attr-defined] diff --git a/acme/textui/ACMEFieldOriginator.py b/acme/textui/ACMEFieldOriginator.py index b80bab03..7bca89f5 100644 --- a/acme/textui/ACMEFieldOriginator.py +++ b/acme/textui/ACMEFieldOriginator.py @@ -6,24 +6,21 @@ # from __future__ import annotations +from dataclasses import dataclass +from typing import Optional from textual.app import ComposeResult from textual.containers import Container, Vertical from textual.widgets import Input, Label from textual.suggester import SuggestFromList from textual.validation import Function from textual import on +from textual.message import Message -# TODO This may has to be turned into a more generic field class - -idFieldOriginator = 'field-originator' -def validateOriginator(value: str) -> bool: - return value is not None and len(value) > 1 and value.startswith(('C', 'S', '/')) - 
-class ACMEFieldOriginator(Container): +class ACMEInputField(Container): DEFAULT_CSS = """ - ACMEFieldOriginator { + ACMEInputField { width: 1fr; height: 4; layout: horizontal; @@ -33,18 +30,18 @@ class ACMEFieldOriginator(Container): margin: 1 1 1 1; } - #field-originator-label { + #field-label { height: 1fr; content-align: left middle; align: left middle; } - #field-originator-input { + #field-input { height: 1fr; width: 1fr; } - #field-originator-pretty { + #field-pretty { height: 1fr; width: 1fr; margin-left: 1; @@ -52,18 +49,33 @@ class ACMEFieldOriginator(Container): } """ - def __init__(self, originator:str, suggestions:list[str] = []) -> None: + + @dataclass + class Submitted(Message): + input: ACMEInputField + """The *Input* widget that is being submitted.""" + value: str + """The value of the *Input* being submitted.""" + + + + def __init__(self, label:str = 'a label', + value:str = '', + suggestions:list[str] = [], + placeholder:str = '', + validators:Function = None, + id:str = None) -> None: # TODO list of originators as a suggestion - super().__init__(id = idFieldOriginator) - self.originator = originator + super().__init__(id = id) self.suggestions = suggestions - self.label = Label('[b]Originator[/b] ', id = 'field-originator-label') - self.input = Input(str(self.suggestions), - placeholder = 'Originator', + self.label = Label(f'[b]{label}[/b] ', id = f'field-label') + self.input = Input(value = value, + placeholder = placeholder, suggester = SuggestFromList(self.suggestions), - validators = Function(validateOriginator, 'Wrong originator format: Must start with "C", "S" or "/", and have length > 1.'), - id = 'field-originator-input') - self.msg = Label('jjj', id = 'field-originator-pretty') + validators = validators, + id = 'field-input') + self.msg = Label('', id = 'field-pretty') + def compose(self) -> ComposeResult: yield self.label @@ -75,16 +87,130 @@ def compose(self) -> ComposeResult: @on(Input.Changed) def show_invalid_reasons(self, event: Input.Changed) -> None: # Updating the UI to show the reasons why validation failed - if not event.validation_result.is_valid: + if event.validation_result and not event.validation_result.is_valid: self.msg.update(event.validation_result.failure_descriptions[0]) else: self.msg.update('') self.originator = event.value + @on(Input.Submitted, '#field-input') + async def submit(self, event: Input.Submitted) -> None: + self.post_message(self.Submitted(self, self.input.value)) + + + def setLabel(self, label:str) -> None: + """ Set the label of the field. + + Args: + label: The label to set. 
+ """ + self.label.update(f'[b]{label}[/b] ') + + + @property + def value(self) -> str: + return self.input.value + + + @value.setter + def value(self, value:str) -> None: + self.input.value = value + + + + + +# TODO This may has to be turned into a more generic field class + +idFieldOriginator = 'field-originator' + +def validateOriginator(value: str) -> bool: + return value is not None and len(value) > 1 and value.startswith(('C', 'S', '/')) and not set(value) & set(' \t\n') + +class ACMEFieldOriginator(ACMEInputField): + def __init__(self, originator:str, suggestions:list[str] = []) -> None: + super().__init__(label = 'Originator', + suggestions = suggestions, + placeholder = 'Originator', + validators = Function(validateOriginator, + 'Wrong originator format: Must start with "C", "S" or "/", contain now white spaces, and have length > 1.') + ) + self.originator = originator + self.suggestions = suggestions + def update(self, originator:str, suggestions:list[str] = []) -> None: self.originator = originator self.suggestions = suggestions self.input.value = originator self.input.suggester = SuggestFromList(self.suggestions) + +# class ACMEFieldOriginator(Container): + +# DEFAULT_CSS = """ +# ACMEFieldOriginator { +# width: 1fr; +# height: 4; +# layout: horizontal; +# overflow: hidden hidden; +# # background: red; +# content-align: left middle; +# margin: 1 1 1 1; +# } + +# #field-originator-label { +# height: 1fr; +# content-align: left middle; +# align: left middle; +# } + +# #field-originator-input { +# height: 1fr; +# width: 1fr; +# } + +# #field-originator-pretty { +# height: 1fr; +# width: 1fr; +# margin-left: 1; +# color: red; +# } +# """ + +# def __init__(self, originator:str, suggestions:list[str] = []) -> None: +# # TODO list of originators as a suggestion +# super().__init__(id = idFieldOriginator) +# self.originator = originator +# self.suggestions = suggestions +# self.label = Label('[b]Originator[/b] ', id = 'field-originator-label') +# self.input = Input(str(self.suggestions), +# placeholder = 'Originator', +# suggester = SuggestFromList(self.suggestions), +# validators = Function(validateOriginator, 'Wrong originator format: Must start with "C", "S" or "/", and have length > 1.'), +# id = 'field-originator-input') +# self.msg = Label('jjj', id = 'field-originator-pretty') + +# def compose(self) -> ComposeResult: +# yield self.label +# with Vertical(): +# yield self.input +# yield self.msg + + +# @on(Input.Changed) +# def show_invalid_reasons(self, event: Input.Changed) -> None: +# # Updating the UI to show the reasons why validation failed +# if not event.validation_result.is_valid: +# self.msg.update(event.validation_result.failure_descriptions[0]) +# else: +# self.msg.update('') +# self.originator = event.value + + +# def update(self, originator:str, suggestions:list[str] = []) -> None: +# self.originator = originator +# self.suggestions = suggestions +# self.input.value = originator +# self.input.suggester = SuggestFromList(self.suggestions) + diff --git a/acme/textui/ACMEHeader.py b/acme/textui/ACMEHeader.py index 11c1ede9..53cef62f 100644 --- a/acme/textui/ACMEHeader.py +++ b/acme/textui/ACMEHeader.py @@ -6,8 +6,6 @@ # """ This module defines the header for the ACME text UI. 
""" -from datetime import datetime, timezone - from rich.text import Text from textual.app import ComposeResult, RenderResult from textual.widgets import Header, Label @@ -17,6 +15,7 @@ from ..services import CSE from ..etc.Constants import Constants from ..etc.DateUtils import toISO8601Date +from ..etc.DateUtils import utcDatetime class ACMEHeaderClock(HeaderClock): @@ -26,10 +25,8 @@ class ACMEHeaderClock(HeaderClock): DEFAULT_CSS = """ ACMEHeaderClock { background: transparent; -} - -HeaderClockSpace { width: 26; + } """ @@ -39,7 +36,7 @@ def render(self) -> RenderResult: Returns: The rendered clock. """ - return Text(f'{toISO8601Date(datetime.now(tz = timezone.utc), readable = True)[:19]} UTC') + return Text(f'{toISO8601Date(utcDatetime(), readable = True)[:19]} UTC') class ACMEHeaderTitle(HeaderTitle): diff --git a/acme/textui/ACMETuiApp.py b/acme/textui/ACMETuiApp.py index fb937e8a..813fcf08 100644 --- a/acme/textui/ACMETuiApp.py +++ b/acme/textui/ACMETuiApp.py @@ -8,12 +8,17 @@ """ from __future__ import annotations +from typing import Callable +from typing_extensions import Literal, get_args +import asyncio from enum import IntEnum, auto from textual.app import App, ComposeResult from textual import on from textual.widgets import Tab, Footer, TabbedContent, TabPane, Static from textual.binding import Binding from textual.design import ColorSystem +from textual.notifications import Notification, SeverityLevel + from ..textui.ACMEHeader import ACMEHeader from ..textui.ACMEContainerAbout import ACMEContainerAbout from ..textui.ACMEContainerConfigurations import ACMEContainerConfigurations @@ -23,10 +28,12 @@ from ..textui.ACMEContainerRequests import ACMEContainerRequests from ..textui.ACMEContainerTools import ACMEContainerTools from ..services import CSE +from ..etc.Types import ResourceTypes from ..helpers.BackgroundWorker import BackgroundWorkerPool + tabResources = 'tab-resources' tabRequests = 'tab-requests' tabRegistrations = 'tab-registrations' @@ -91,12 +98,20 @@ def __init__(self, textUI:TextUI.TextUI): self.textUI = textUI # Keep backward link to the textUI manager self.quitReason = ACMETuiQuitReason.undefined self.attributeExplanations = CSE.validator.getShortnameLongNameMapping() + + # Add the resource types to the attribute explanations + for n in ResourceTypes: + self.attributeExplanations[ResourceTypes(n).tpe()] = f'{ResourceTypes.fullname(n)} resource type' + # This is used to keep track of the current tab. # This is a bit different from the actual current tab from the self.tabs # attribute because at one point it is used to determine the previous tab. self.currentTab:Tab = None - #self.app.DEFAULT_COLORS = CUSTOM_COLORS - # _app.DEFAULT_COLORS = CUSTOM_COLORS + + # This is used to keep a pointer to the current event loop to use it + # for async calls from non-async functions. + # This is set in the on_load() function. 
+ self.event_loop:asyncio.AbstractEventLoop = None self.tabs = TabbedContent() self.containerTree = ACMEContainerTree() @@ -108,6 +123,7 @@ def __init__(self, textUI:TextUI.TextUI): self.containerAbout = ACMEContainerAbout() self.debugConsole = Static('', id = 'debug-console') + def compose(self) -> ComposeResult: """Build the Main UI.""" yield ACMEHeader(show_clock = True) @@ -134,6 +150,7 @@ def compose(self) -> ComposeResult: def on_load(self) -> None: self.dark = self.textUI.theme == 'dark' self.syntaxTheme = 'ansi_dark' if self.dark else 'ansi_light' + self.event_loop = asyncio.get_event_loop() # self.design = CUSTOM_COLORS # self.refresh_css() @@ -178,30 +195,62 @@ def logDebug(self, msg:str) -> None: def scriptPrint(self, scriptName:str, msg:str) -> None: - self.containerTools.scriptPrint(scriptName, msg) + if self.containerTools: + self.containerTools.scriptPrint(scriptName, msg) def scriptLog(self, scriptName:str, msg:str) -> None: - self.containerTools.scriptLog(scriptName, msg) + if self.containerTools: + self.containerTools.scriptLog(scriptName, msg) def scriptLogError(self, scriptName:str, msg:str) -> None: - self.containerTools.scriptLogError(scriptName, msg) + if self.containerTools: + self.containerTools.scriptLogError(scriptName, msg) def scriptClearConsole(self, scriptName:str) -> None: - self.containerTools.scriptClearConsole(scriptName) + if self.containerTools: + self.containerTools.scriptClearConsole(scriptName) + def scriptShowNotification(self, message:str, title:str, severity:Literal['information', 'warning', 'error'], timeout:float) -> None: + + async def _call() -> None: + self.notify(message = message, title = title, severity = severity, timeout = timeout) + + if timeout is None: + timeout = Notification.timeout + + if severity is None: + severity = 'information' + elif severity not in get_args(SeverityLevel): + raise ValueError(f'Invalid severity level: {severity}') + + self.runAsyncTask(_call) + + def scriptVisualBell(self, scriptName:str) -> None: - BackgroundWorkerPool.runJob(lambda:self.containerTools.scriptVisualBell(scriptName)) - # self.containerTools.scriptVisualBell(scriptName) + if self.containerTools: + BackgroundWorkerPool.runJob(lambda:self.containerTools.scriptVisualBell(scriptName)) def refreshResources(self) -> None: - self.containerTree.update() + if self.containerTree: + self.containerTree.update() ######################################################################### + + def runAsyncTask(self, task:Callable) -> None: + """ Run an async task from a non-async function. + + Args: + task: The async task to run. + """ + if self.event_loop: + self.event_loop.create_task(task()) + + def restart(self) -> None: self.quitReason = ACMETuiQuitReason.restart self.exit() @@ -211,6 +260,7 @@ def cleanUp(self) -> None: """ Clean up the UI before exiting. 
""" self.containerTools.cleanUp() + self.event_loop = None # diff --git a/docs/ACMEScript-functions.md b/docs/ACMEScript-functions.md index 5d1a22ca..bfd84b00 100644 --- a/docs/ACMEScript-functions.md +++ b/docs/ACMEScript-functions.md @@ -20,6 +20,8 @@ The following built-in functions and variables are provided by the ACMEScript in | | [datetime](#datetime) | Return a timestamp | | | [defun](#defun) | Define a function | | | [dec](#dec) | Decrement a variable | +| | [dolist](#dolist) | Loop over a list | +| | [dotimes](#dotimes) | Loop over a numeric value | | | [eval](#eval) | Evaluate and execute a quoted list | | | [evaluate-inline](#evaluate-inline) | Enable and disable inline string evaluation | | | [get-json-attribute](#get-json-attribute) | Get a JSON attribute from a JSON structure | @@ -65,8 +67,10 @@ The following built-in functions and variables are provided by the ACMEScript in | | [Logical Operations](#logical-operations) | List of supported logical operations | | | [Mathematical Operations](#mathematical-operations) | List of supported mathematical operations | | [CSE](#_cse) | [clear-console](#clear-console) | Clear the console screen | +| | [cse-attribute-info](#cse-atribute-info) | Return information about one or more matching attributes | | | [cse-status](#cse-status) | Return the CSE's current status | | | [get-config](#get-config) | Retrieve a CSE's configuration setting | +| | [get-loglevel](#get-loglevel) | Retrieve the CSE's current log level | | | [get-storage](#get-storage) | Retrieve a value from the CSE's internal script-data storage | | | [has-config](#has-config) | Determine the existence of a CSE's configuration setting | | | [has-storage](#has-storage) | Determine the existence of a key/value in the CSE's internal script-data storage | @@ -90,6 +94,7 @@ The following built-in functions and variables are provided by the ACMEScript in | [Text UI](#_textui) | [open-web-browser](#open-web-browser) | Open a web page in the default browser | | | [set-category-description](#set-category-description) | Set the description for a whole category of scripts | | | [runs-in-tui](#runs-in-tui) | Determine whether the CSE runs in Text UI mode | +| | [tui-notify](#tui-notify) | Display a desktop-like notification | | | [tui-refresh-resources](#tui-refresh-resources) | Force a refresh of the Text UI's resource tree | | | [tui-visual-bell](#tui-visual-bell) | Shortly flashes the script's entry in the text UI's scripts list | | [Network](#_network) | [http](#http) | Send http requests | @@ -121,12 +126,14 @@ In addition more functions are provided in the file [ASFunctions.as](../init/ASF Concatenate and return the stringified versions of the symbol arguments. -See also: [to-string](#to-string) +Note, that this function will not add spaces between the symbols. One can use the [nl](#nl) and [sp](#sp) functions to add newlines and spaces. + +See also: [nl](#nl), [sp](#sp), [to-string](#to-string) Example: ```lisp -(. "Time: " (datetime)) ;; Returns "Time: 20230308T231049.934630" +(. "Time:" sp (datetime)) ;; Returns "Time: 20230308T231049.934630" ``` [top](#top) @@ -234,7 +241,7 @@ The `case` function implements the functionality of a `switch...case` statement The *key* s-expression is evaluated and its value taken for the following comparisons. After this expression a number of lists may be given. -Each of these list contains two symbols that are handled in order: The first symbol evaluates to a value that is compared to the result of the *key* s-expression. 
If there is a match then the second s-exprersion is evaluated, and then the comparisons are stopped and the *case* function returns. +Each of these list contains two symbols that are handled in order: The first symbol evaluates to a value that is compared to the result of the *key* s-expression. If there is a match then the second s-expression is evaluated, and then the comparisons are stopped and the *case* function returns. The special symbol *otherwise* for a *condition* s-expression always matches and can be used as a default or fallback case . @@ -314,13 +321,52 @@ Example: --- + + +### defun + +`(defun )` + +The `defun` function defines a new function. + +The first argument to this function is a string and specifies the new function's name. A function definition overrides already user-defined or built-in functions with the same name. + +The second argument is a symbol list with argument names for the function. Arguments act as function-local variables that can be used in the function body. + +The third argument is an s-expression that is evaluated as the function body. + +The result of a function is the result of the expression that is evaluated last in a function evaluation. + +See also: [lambda](#lambda), [return](#return) + +Examples: + +```lisp +(defun greeting (name) ;; define the function + (print "hello" name)) +(greeting "Arthur") ;; call the function + +;; Fibonacci +(defun fib (n) ;; Define the function + (if (< n 2) + n + (+ (fib (- n 1)) + (fib (- n 2))) + )) +(fib 10) ;; Returns 55 +``` + +[top](#top) + +--- + ### dec `(dec [])` -The `dec` function decrements a provided variable. The default for the increment is 1, but can be given as an optional second argument. If this argument is provided then the variable is decemented by this value. The value can be an integer or a float. +The `dec` function decrements a provided variable. The default for the increment is 1, but can be given as an optional second argument. If this argument is provided then the variable is decremented by this value. The value can be an integer or a float. The function returns the variable's new value. @@ -338,39 +384,60 @@ Example: --- - + -### defun +### dolist -`(defun )` +`(dolist ( []) (+))` -The `defun` function defines a new function. +The `dolist` function loops over a list. +The first arguments is a list that contains a loop variable, a list to iterate over, and an optional +`result` variable. The second argument is a list that contains one or more s-expressions that are executed in the loop. -The first argument to this function is a string and specifies the new function's name. A function definition overrides already user-defined or built-in functions with the same name. +If the `result variable` is specified then the loop returns the value of that variable, otherwise `nil`. -The second argument is a symbol list with argument names for the function. Arguments act as function-local variables that can be used in the function body. +See also: [dotimes](#dotimes), [while](#while) -The third argument is an s-expression that is evaluated as the function body. +Example: -The result of a function is the result of the expression that is evaluated last in a function evaluation. 
+```lisp +(dolist (i '(1 2 3 4 5 6 7 8 9 10)) + (print i)) ;; print 1..10 -See also: [lambda](#lambda), [return](#return) +(setq result 0) +(dolist (i '(1 2 3 4 5 6 7 8 9 10) result) + (setq result (+ result i))) ;; sum 1..10 +(print result) ;; 55 +``` -Examples: +[top](#top) + +--- + + + +### dotimes + +`(dotimes ( []) (+))` + +The `dotimes` function provides a simple numeric loop functionality. +The first arguments is a list that contains a loop variable that starts at 0, the loop `count` (which must be a non-negative number), and an optional +`result` variable. The second argument is a list that contains one or more s-expressions that are executed in the loop. + +If the `result variable` is specified then the loop returns the value of that variable, otherwise `nil`. + +See also: [dolist](#dolist), [while](#while) + +Example: ```lisp -(defun greeting (name) ;; define the function - (print "hello" name)) -(greeting "Arthur") ;; call the function +(dotimes (i 10) + (print i)) ;; print 0..9 -;; Fibonacci -(defun fib (n) ;; Define the function - (if (< n 2) - n - (+ (fib (- n 1)) - (fib (- n 2))) - )) -(fib 10) ;; Returns 55 +(setq result 0) +(dotimes (i 10 result) + (setq result (+ result i))) ;; sum 0..9 +(print result) ;; 45 ``` [top](#top) @@ -463,12 +530,16 @@ Examples: --- + + ### if `(if [])` The `if` function works like an “if-then-else” statement in other programing languages. The first argument is a boolean expression. If it evaluates to *true* then the second argument is executed. If it evaluates to *false* then the third (optional) argument is executed, if present. +The boolean expression can be any s-expression that evaluates to a boolean value or *nil*, or a list or a string. *nil* values, empty lists, or zero-length strings evaluate to *false*, or to *true* otherwise. + Example: ```lisp @@ -510,7 +581,7 @@ Example: `(inc [])` -The `inc` function increments a provided variable. The default for the increment is 1, but can be given as an optional second argument. If this argument is provided then the variable is incemented by this value. The value can be an integer or a float. +The `inc` function increments a provided variable. The default for the increment is 1, but can be given as an optional second argument. If this argument is provided then the variable is incremented by this value. The value can be an integer or a float. The function returns the variable's new value. @@ -1030,7 +1101,7 @@ Example: `(round [])` -The `round` function rounds a number to *precission* digits after the decimal point. The default is 0, meaning to round to nearest integer. +The `round` function rounds a number to *precision* digits after the decimal point. The default is 0, meaning to round to nearest integer. Example: @@ -1092,7 +1163,7 @@ Example: `(sleep )` -The `sleep` function adds a delay to the script execution. The evaludation stops for a number of seconds. The delay could be provided as an integer or float number. +The `sleep` function adds a delay to the script execution. The evaluation stops for a number of seconds. The delay could be provided as an integer or float number. If the script execution timeouts during a sleep, the function is interrupted and all subsequent s-expressions are not evaluated. @@ -1116,7 +1187,7 @@ Example: The `slice` function returns the slice of a list or a string. -The behaviour is the same as slicing in Python, except that both *start* and *end* must be provided. 
The first argument is the *start* (including) of the slice, the second is the *end* (exlcuding) of the slice. The fourth argument is the list or string to slice. +The behavior is the same as slicing in Python, except that both *start* and *end* must be provided. The first argument is the *start* (including) of the slice, the second is the *end* (excluding) of the slice. The fourth argument is the list or string to slice. Example: @@ -1281,9 +1352,11 @@ The `while` function implements a loop functionality. A `while` loop continues to run when the first *guard* s-expression evaluates to *true*. Then the *body* s-expression is evaluated. After this the *guard* is evaluated again and the the loops continues or the `while` function returns. +The boolean guard can be any s-expression that evaluates to a boolean value or *nil*, or a list or a string. *nil* values, empty lists, or zero-length strings evaluate to *false*, or to *true* otherwise. + The `while` function returns the result of the last evaluated s-expression in the *body*. -See also: [return](#return) +See also: [doloop](#doloop), [dotime](#dotimes), [return](#return) Example: @@ -1421,6 +1494,35 @@ Example: [top](#top) +--- + + + +### cse-attribute-info + +`(cse-attribute-info )` + +Return a list of CSE attribute infos for the attribute `name``. +The search is done over the short and long names of the attributes applying +a fuzzy search when searching the long names. + + +The function returns a quoted list where each entry is another quoted list +with the following symbols: + +- attribute short name +- attribute long name +- attribute type + +Example: + +```lisp +(cse-attribute-info "acop") ;; Returns ( ( "acop" "accessControlOperations" "nonNegInteger" ) ) +``` + +[top](#top) + + --- @@ -1470,6 +1572,29 @@ Examples: --- + + +### get-loglevel + +`(get-loglevel)` + +The `get-loglevel` function retrieves a the CSE's current log level setting. The return value will be one of the following strings: + +- "DEBUG" +- "INFO" +- "WARNING" +- "ERROR" +- "OFF" + +Example: +```lisp +(get-loglevel) ;; Return, for example, INFO +``` + +[top](#top) + +--- + ### get-storage @@ -1559,7 +1684,7 @@ Example: `(log-divider [])` -The `log-divider` function inserts a divider line in the CSE's *DEBUG* log. It can help to easily identifiy the different sections when working with many requests. An optional (short) message can be provided in the argument. +The `log-divider` function inserts a divider line in the CSE's *DEBUG* log. It can help to easily identify the different sections when working with many requests. An optional (short) message can be provided in the argument. Examples: @@ -2106,6 +2231,42 @@ Examples: --- + + +### tui-notify + +`(tui-notify [] [:str>] [])` + +Show a desktop-like notification in the TUI. + +This function is only available in TUI mode. It has the following arguments: + +- message: The message to show. +- title: (Optional) The title of the notification. +- severity: (Optional) The severity of the notification. This can be one of the following values: + - information (the default) + - warning + - error +- timeout: (Optional) The timeout in seconds after which the notification will disappear again. If not specified, the notification will disappear after 3 seconds. + +If one of the optional arguments needs to be left out, a *nil* symbol must be used instead. +The function returns NIL. 
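+
+As an illustrative sketch, a tool script could guard the call with [runs-in-tui](#runs-in-tui) and fall back to a plain console message when the CSE does not run in TUI mode; the message, title, severity and timeout used here are placeholders, not prescribed values:
+
+```lisp
+;; Hypothetical guard: only raise a TUI notification when the text UI is active.
+(if (runs-in-tui)
+    (tui-notify "Import finished" "Importer" "information" 5)
+    (print "Import finished"))
+```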
+ +Examples: + +```lisp +(tui-notify "a message") ;; Displays "a message" in an information notification for 3 seconds +(tui-notify "a message" "a title") ;; Displays "a message" with title "a title in an information notification for 3 seconds +(tui-notify "a message") ;; Displays "a message" in an information notification for 3 seconds +(tui-notify "a message" nil "warning") ;; Displays "a message" in a warning notification, no title +(tui-notify "a message" nil nil 10) ;; Displays "a message" in an information notification, no title, for 3 seconds + +``` + +[top](#top) + +--- + ### tui-refresh-resources diff --git a/docs/ACMEScript-metatags.md b/docs/ACMEScript-metatags.md index 41c2e7ba..6d2c1226 100644 --- a/docs/ACMEScript-metatags.md +++ b/docs/ACMEScript-metatags.md @@ -28,6 +28,7 @@ Meta tags are keyword that start with an at-sign "@". They can appear anywhere i | [Text UI](#_textui) | [@category](#meta_category) | Add a category to the script for the text UI's *Tools* section | | | [@tuiAutoRun](#meta_tuiAutoRun) | Automatically run scripts when selecting them, and optionally repeat | | | [@tuiExecuteButton](#meta_tuiExecuteButton) | Configure the script's `Execute` button in the text UI | +| | [@tuiInput](#meta_tuiInput) | Add an input field for script arguments in the text UI | | | [@tuiSortOrder](#meta_tuiSortOrder) | Specify the sort order for scripts in a category in the text UI's *Tools* section | | | [@tuiTool](#meta_tuiTool) | Tag a script for listing in the text UI's *Tools* section | @@ -54,9 +55,9 @@ They can be accessed like any other environment variable, for example: The `@at` meta tag specifies a time / date pattern when a script should be executed. This pattern follows the Unix [crontab](https://crontab.guru/crontab.5.html) pattern. -A crontab pattern consists of the following five fields: +A crontab pattern consists of the following six fields: -`minute hour dayOfMonth month dayOfWeek` +`second minute hour dayOfMonth month dayOfWeek year` Each field is mandatory and must comply to the following values: @@ -68,9 +69,9 @@ Each field is mandatory and must comply to the following values: Example: ```lisp ;; Run a script every 5 minutes -@at */5 * * * * +@at 0 */5 * * * * * ;; Run a script every Friday at 2:30 am -@at 30 2 * * 4 +@at 0 30 2 * * 4 * ``` [top](#top) @@ -434,9 +435,11 @@ Example: ### @tuiExecuteButton -`@tuiExecuteButton [] ` +`@tuiExecuteButton []` -This meta tag configures the script's `Execute` button of the text UI. The following configurations are possible +This meta tag configures the script's `Execute` button of the text UI. + +The following configurations are possible: - Not present in a script: The button displays the default text "Execute". - Present in a script with an argument: The argument is used for the button's label. @@ -452,19 +455,24 @@ Example: --- - + -### @tuiTool +### @tuiInput -`@tuiTool` +`@tuiInput []` -This meta tag categorizes a script as a tool. Scripts marked as *tuiTools* are listed in the Text UI's *Tools* -section. +This meta tag adds an input field to text UI. Text entered in this field is passed as +arguments to the script that can be access using the [argv](ACMEScript-functions.md#argv) function. + +The following configurations are possible: + +- Not present in a script or without a label: No input field is added. +- Present in a script with an argument: The argument is used for the input field's label. 
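+
+A complete tool script using this meta tag might, for example, combine it with [@tuiTool](#meta_tuiTool) and [@tuiExecuteButton](#meta_tuiExecuteButton); the label, the button text, and the exact way the entered text is read back via [argv](ACMEScript-functions.md#argv) are illustrative placeholders:
+
+```lisp
+@tuiTool
+@tuiInput Name
+@tuiExecuteButton Greet
+
+;; Sketch only: print a greeting with the text entered in the input field.
+;; The exact argv usage is an assumption; see the argv documentation.
+(print "Hello" (argv))
+```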
Example: ```lisp -@tuiTool +@tuiInput A Label ``` [top](#top) @@ -492,5 +500,24 @@ Example: --- + + +### @tuiTool + +`@tuiTool` + +This meta tag categorizes a script as a tool. Scripts marked as *tuiTools* are listed in the Text UI's *Tools* +section. + +Example: + +```lisp +@tuiTool +``` + +[top](#top) + +--- + [← ACMEScript](ACMEScript.md) [← README](../README.md) diff --git a/docs/ACMEScript.md b/docs/ACMEScript.md index 098dd9a9..7c1d373c 100644 --- a/docs/ACMEScript.md +++ b/docs/ACMEScript.md @@ -10,6 +10,7 @@ The \[ACME] CSE supports a lisp-based scripting language, called ACMEScript, tha - Update CSE configuration settings. - Call internal CSE functions. - Run scheduled script jobs. +- Implement tool scripts for the [Text UI](TextUI.md). **Table of Contents** @@ -75,9 +76,13 @@ Example: ### Variables and Function Scopes -The scope of variables is global to a script execution, and variables are removed between script runs. +Variables are global to a script execution. Global variables that are updated in a function call are updated globally. Variables that are not defined globally but are defined in a function's scope do only exist in the scope of the function and sub-functions calls. -In addition to the normal script variables the runtime environment may pass extra environment variables to the script. +In addition to the normal script variables the runtime environment may pass extra environment variables to the script. They are mapped to the script's global variables and can be retrieved like any other global variable (but not updated or deleted). Variables that are set during the execution of a script have precedence over environment variables with the same name. + +Variables are removed between script runs. + +Variable names are case-sensitive. ### Quoting @@ -113,17 +118,19 @@ Meta tags are described in [a separate document](ACMEScript-metatags.md). ## Loading and Running Scripts -Scripts are stored in the *init* directory, and ind a list of directories that [can be specified](Configuration.md#scripting) in the configuration file. +Scripts are stored in and are imported from the *init* directory and in sub-directories, which names end with *.scripts*, of the *init* directory. +One can also specify a [list of directories](Configuration.md#scripting) in the configuration file with additional scripts that will be imported. All files with the extension "*.as*" are treated as ACMEScript files and are automatically imported during CSE startup and also imported and updated during runtime. There are different ways to run scripts: -- They can be run from the console interface with the `R` (Run) command. -- They can be run by a keypress from the console interface (see [onKey](ACMEScript-metatags.md#meta_onkey) meta tag). -- They can be scheduled to run at specific times or dates. This is similar to the Unix cron system (see [at](ACMEScript-metatags.md#meta_at) meta tag). -- They can be scheduled to run at certain events. Currently, the CSE [init](ACMEScript-metatags.md#meta_init), [onStartup](ACMEScript-metatags.md#meta_onstartup), [onRestart](ACMEScript-metatags.md#meta_onrestart), and [onShutdown](ACMEScript-metatags.md#meta_onshutdown) events are supported. -- They can be run as a receiver of a NOTIFY request from the CSE. See [onNotification](ACMEScript-metatags.md#meta_onnotification) meta tag. +- Scripts can be run from the console interface with the `R` (Run) command. 
+- They can also be run by a keypress from the console interface (see [onKey](ACMEScript-metatags.md#meta_onkey) meta tag). +- Scripts can be scheduled to run at specific times or dates. This is similar to the Unix cron system (see [at](ACMEScript-metatags.md#meta_at) meta tag). +- It is possible to schedule scripts to run at certain events. Currently, the CSE [init](ACMEScript-metatags.md#meta_init), [onStartup](ACMEScript-metatags.md#meta_onstartup), [onRestart](ACMEScript-metatags.md#meta_onrestart), and [onShutdown](ACMEScript-metatags.md#meta_onshutdown) events are supported. +- Scrips can be run as a receiver of a NOTIFY request from the CSE. See [onNotification](ACMEScript-metatags.md#meta_onnotification) meta tag. - They can also be run as a command of the [Upper Tester Interface](Operation.md#upper_tester). +- Scripts can be integrated as tools in the [Text UI](TextUI.md). See also the available [meta-tags](ACMEScript-metatags.md#_textui) for available tags. @@ -175,7 +182,7 @@ In the following example the s-expression `(+ 1 2)` is evaluated when the string Evaluation can be locally disabled by escaping the opening part: ```lisp - (print "1 + 2 = \\${ + 1 2 }") ;; Prints "1 + 2 = [(+ 1 2)]" + (print "1 + 2 = \\${ + 1 2 }") ;; Prints "1 + 2 = ${ + 1 2 )}" ``` Evaluation can also be disabled and enabled by using the [evaluate-inline](ACMEScript-functions.md#evaluate-inline) function. diff --git a/docs/CSE.uxf b/docs/CSE.uxf index 8c65525e..547d14c9 100644 --- a/docs/CSE.uxf +++ b/docs/CSE.uxf @@ -1,14 +1,14 @@ - + - 12 + 8 UMLGeneric - 780 - 480 - 192 - 96 + 528 + 320 + 128 + 64 HTTP Server REST @@ -24,10 +24,10 @@ drawLine(21.8,11.4,20,15) Relation - 852 - 348 - 36 - 156 + 576 + 232 + 24 + 104 lt=()- @@ -36,10 +36,10 @@ drawLine(21.8,11.4,20,15) Relation - 792 - 564 - 84 - 84 + 536 + 376 + 56 + 56 lt=)- fontsize=8 @@ -50,10 +50,10 @@ requests Relation - 900 - 672 - 108 - 84 + 608 + 448 + 72 + 56 lt=()- handle resource @@ -65,10 +65,10 @@ fontsize=8 UMLGeneric - 780 - 732 - 192 - 96 + 528 + 488 + 128 + 64 Dispatcher symbol=component @@ -79,10 +79,10 @@ transparency=0 UMLGeneric - 780 - 912 - 192 - 96 + 528 + 608 + 128 + 64 Resource Storage symbol=component @@ -97,10 +97,10 @@ drawLine(21.8,11.4,20,15) Relation - 864 - 864 - 60 - 72 + 584 + 576 + 40 + 48 lt=()- BREAD @@ -111,10 +111,10 @@ fontsize=8 Relation - 852 - 816 - 60 - 72 + 576 + 544 + 40 + 48 lt=)- 20.0;30.0;20.0;10.0 @@ -122,10 +122,10 @@ fontsize=8 UMLGeneric - 1020 - 912 - 192 - 96 + 688 + 608 + 128 + 64 oneM2M Resource Classes @@ -137,10 +137,10 @@ transparency=0 UMLGeneric - 540 - 912 - 192 - 96 + 368 + 608 + 128 + 64 Importer symbol=component @@ -151,10 +151,10 @@ transparency=0 Relation - 612 - 852 - 96 - 84 + 416 + 568 + 64 + 56 lt=)- add @@ -166,10 +166,10 @@ fontsize=8 UMLGeneric - 300 - 912 - 192 - 96 + 208 + 608 + 128 + 64 Configuration symbol=component @@ -180,10 +180,10 @@ transparency=0 Relation - 384 - 864 - 72 - 72 + 264 + 576 + 48 + 48 lt=()- set/get @@ -193,10 +193,10 @@ fontsize=8 UMLNote - 744 - 528 - 72 - 36 + 504 + 352 + 48 + 24 Flask Requests @@ -211,10 +211,10 @@ fontsize=10 UMLNote - 924 - 984 - 72 - 36 + 624 + 656 + 48 + 24 TinyDB transparency=0 @@ -228,10 +228,10 @@ fontsize=10 UMLGeneric - 60 - 912 - 192 - 96 + 48 + 608 + 128 + 64 Logging symbol=component @@ -242,10 +242,10 @@ transparency=0 Relation - 144 - 852 - 96 - 84 + 104 + 568 + 64 + 56 lt=()- info/debug @@ -256,10 +256,10 @@ fontsize=8 UMLGeneric - 792 - 1092 - 168 - 36 + 536 + 728 + 112 + 24 Resources symbol=artifact @@ -272,10 +272,10 
@@ fontsize=10 UMLGeneric - 792 - 1140 - 168 - 36 + 536 + 760 + 112 + 24 Identifiers symbol=artifact @@ -288,10 +288,10 @@ fontsize=10 UMLGeneric - 780 - 1056 - 192 - 276 + 528 + 704 + 248 + 184 DB Tables transparency=0 @@ -300,10 +300,10 @@ transparency=0 Relation - 864 - 996 - 36 - 84 + 584 + 664 + 24 + 56 lt=- fontsize=8 @@ -312,12 +312,12 @@ fontsize=8 UMLGeneric - 540 - 1068 - 192 - 36 + 368 + 760 + 128 + 24 - Initial Resources + Scripts symbol=artifact halign=center layer=1 @@ -328,22 +328,22 @@ fontsize=10 Relation - 624 - 996 - 36 - 96 + 424 + 664 + 24 + 56 lt=- fontsize=8 - 10.0;60.0;10.0;10.0 + 10.0;50.0;10.0;10.0 UMLGeneric - 72 - 1068 - 168 - 36 + 56 + 712 + 112 + 24 Rotating Logs symbol=artifact @@ -356,10 +356,10 @@ fontsize=10 UMLGeneric - 156 - 1032 - 24 - 24 + 112 + 688 + 16 + 16 lw=0 fontsize=12 @@ -369,10 +369,10 @@ fontsize=12 Relation - 144 - 996 - 36 - 84 + 104 + 664 + 24 + 56 lt=- fontsize=8 @@ -381,10 +381,10 @@ fontsize=8 UMLGeneric - 864 - 384 - 60 - 36 + 584 + 256 + 40 + 24 lw=0 fontsize=8 @@ -396,10 +396,10 @@ via http UMLGeneric - 300 - 1068 - 192 - 36 + 208 + 712 + 128 + 24 Configuration File symbol=artifact @@ -412,10 +412,10 @@ fontsize=10 Relation - 384 - 996 - 36 - 96 + 264 + 664 + 24 + 64 lt=- fontsize=8 @@ -424,10 +424,10 @@ fontsize=8 Relation - 1116 - 852 - 120 - 84 + 752 + 568 + 80 + 56 lt=)- access resources @@ -439,9 +439,9 @@ fontsize=8 UMLPackage 24 - 420 - 2424 - 948 + 280 + 1984 + 632 ACME CSE -- @@ -453,10 +453,10 @@ transparency=0 UMLGeneric - 72 - 1116 - 168 - 36 + 56 + 744 + 112 + 24 Console symbol=artifact @@ -469,10 +469,10 @@ fontsize=10 UMLGeneric - 60 - 1056 - 192 - 108 + 48 + 704 + 128 + 72 transparency=0 @@ -480,10 +480,10 @@ fontsize=10 UMLGeneric - 540 - 732 - 192 - 96 + 368 + 488 + 128 + 64 Security Manager symbol=component @@ -494,10 +494,10 @@ transparency=0 Relation - 588 - 660 - 96 - 96 + 400 + 440 + 64 + 64 lt=()- check @@ -508,10 +508,10 @@ fontsize=8 Relation - 1104 - 672 - 84 - 84 + 744 + 448 + 56 + 56 lt=()- handle @@ -525,10 +525,10 @@ fontsize=8 UMLGeneric - 396 - 1044 - 24 - 24 + 272 + 696 + 16 + 16 lw=0 fontsize=12 @@ -538,10 +538,10 @@ fontsize=12 UMLGeneric - 636 - 1044 - 24 - 24 + 432 + 688 + 16 + 16 lw=0 fontsize=12 @@ -551,10 +551,10 @@ fontsize=12 UMLGeneric - 876 - 1032 - 24 - 24 + 592 + 688 + 16 + 16 lw=0 fontsize=12 @@ -564,10 +564,10 @@ fontsize=12 Relation - 960 - 924 - 84 - 48 + 648 + 616 + 56 + 32 lt=- stores @@ -578,10 +578,10 @@ fontsize=8 UMLGeneric - 996 - 948 - 24 - 24 + 672 + 632 + 16 + 16 lw=0 fontsize=12 @@ -591,10 +591,10 @@ fontsize=12 UMLNote - 924 - 1044 - 96 - 36 + 624 + 696 + 64 + 24 In memory or file system @@ -609,10 +609,10 @@ layer=1 Relation - 672 - 672 - 96 - 84 + 456 + 448 + 64 + 56 lt=)- fontsize=8 @@ -623,10 +623,10 @@ resources UMLGeneric - 300 - 732 - 192 - 96 + 208 + 488 + 128 + 64 Notification Manager symbol=component @@ -641,10 +641,10 @@ drawLine(21.8,11.4,20,15) Relation - 900 - 348 - 60 - 156 + 608 + 232 + 40 + 104 lt=)- 20.0;20.0;20.0;110.0 @@ -652,10 +652,10 @@ drawLine(21.8,11.4,20,15) UMLGeneric - 924 - 384 - 72 - 36 + 624 + 256 + 48 + 24 lw=0 fontsize=8 @@ -667,10 +667,10 @@ via http Relation - 396 - 660 - 72 - 96 + 272 + 440 + 48 + 64 lt=()- add/del @@ -681,10 +681,10 @@ fontsize=8 Relation - 456 - 672 - 96 - 84 + 312 + 448 + 64 + 56 lt=)- fontsize=8 @@ -695,10 +695,10 @@ resources UMLGeneric - 1260 - 732 - 192 - 96 + 848 + 488 + 128 + 64 remoteCSE Manager symbol=component @@ -713,10 +713,10 @@ drawLine(21.8,11.4,20,15) UMLGeneric - 792 - 1188 - 168 - 36 + 536 + 792 + 
112 + 24 Subscriptions symbol=artifact @@ -726,26 +726,13 @@ transparency=0 fontsize=10 - - UMLGeneric - - 552 - 1080 - 192 - 36 - - halign=left -layer=-1 -transparency=0 - - Relation - 1068 - 852 - 72 - 84 + 720 + 568 + 48 + 56 lt=()- fontsize=8 @@ -757,10 +744,10 @@ set/get Relation - 1032 - 852 - 60 - 36 + 696 + 568 + 40 + 24 lt=..> fontsize=8 @@ -769,10 +756,10 @@ fontsize=8 Relation - 768 - 348 - 60 - 156 + 520 + 232 + 40 + 104 lt=)- 20.0;20.0;20.0;110.0 @@ -780,10 +767,10 @@ fontsize=8 UMLGeneric - 792 - 384 - 48 - 36 + 536 + 256 + 32 + 24 lw=0 fontsize=8 @@ -795,10 +782,10 @@ via http Relation - 924 - 564 - 72 - 84 + 624 + 376 + 48 + 56 lt=()- send @@ -809,10 +796,10 @@ fontsize=8 Relation - 312 - 672 - 108 - 84 + 216 + 448 + 72 + 56 lt=)- fontsize=8 @@ -822,10 +809,10 @@ send NOTIFY UMLGeneric - 1056 - 216 - 192 - 96 + 712 + 144 + 128 + 64 WebUI symbol=component @@ -836,10 +823,10 @@ transparency=0 Relation - 948 - 240 - 132 - 60 + 640 + 160 + 88 + 40 lt=)- 20.0;20.0;90.0;20.0 @@ -847,10 +834,10 @@ transparency=0 Relation - 960 - 300 - 240 - 252 + 648 + 200 + 160 + 168 lt=- serves @@ -861,10 +848,10 @@ fontsize=8 UMLGeneric - 960 - 216 - 60 - 36 + 648 + 144 + 40 + 24 lw=0 fontsize=8 @@ -876,10 +863,10 @@ via http UMLGeneric - 1740 - 732 - 192 - 96 + 1168 + 488 + 128 + 64 Announcement Manager @@ -895,10 +882,10 @@ drawLine(21.8,11.4,20,15) UMLGeneric - 1500 - 732 - 192 - 96 + 1008 + 488 + 128 + 64 Group Manager @@ -910,10 +897,10 @@ transparency=0 Relation - 1620 - 660 - 84 - 96 + 1088 + 440 + 56 + 64 handle group @@ -925,10 +912,10 @@ fontsize=8 Relation - 1524 - 672 - 96 - 84 + 1024 + 448 + 64 + 56 lt=)- access @@ -939,10 +926,10 @@ fontsize=8 UMLGeneric - 1500 - 912 - 192 - 96 + 1008 + 608 + 128 + 64 Statistics symbol=component @@ -953,10 +940,10 @@ transparency=0 UMLGeneric - 792 - 1236 - 168 - 36 + 536 + 824 + 112 + 24 CSE Statistics symbol=artifact @@ -969,10 +956,10 @@ fontsize=10 Relation - 1572 - 840 - 72 - 96 + 1056 + 560 + 48 + 64 lt=)- store @@ -983,10 +970,10 @@ fontsize=8 Relation - 1512 - 852 - 60 - 84 + 1016 + 568 + 40 + 56 lt=()- recv @@ -997,10 +984,10 @@ fontsize=8 UMLGeneric - 1260 - 912 - 192 - 96 + 848 + 608 + 128 + 64 Event Manager symbol=component @@ -1015,10 +1002,10 @@ drawLine(21.8,11.4,20,15) Relation - 1356 - 852 - 60 - 84 + 912 + 568 + 40 + 56 lt=()- recv @@ -1029,10 +1016,10 @@ fontsize=8 Relation - 1272 - 852 - 96 - 84 + 856 + 568 + 64 + 56 lt=()- manage @@ -1044,10 +1031,10 @@ fontsize=8 Relation - 1392 - 852 - 72 - 84 + 936 + 568 + 48 + 56 lt=)- fwd @@ -1058,47 +1045,23 @@ fontsize=8 Relation - 1632 - 852 - 72 - 84 + 1096 + 568 + 48 + 56 lt=()- statistics fontsize=8 10.0;10.0;10.0;50.0 - - UMLNote - - 516 - 984 - 96 - 60 - - *TODO* - -Keep watching -the directory -transparency=0 -bg=yellow -valign=top -halign=center -layer=1 -fontsize=8 -customelement= -drawArc(10,10,10,10,0,270,true) transparency=100 -drawLine(16.5,14,20,15) -drawLine(21.8,11.4,20,15) - - UMLGeneric - 60 - 732 - 192 - 96 + 48 + 488 + 128 + 64 Registration Manager symbol=component @@ -1113,10 +1076,10 @@ drawLine(21.8,11.4,20,15) Relation - 96 - 672 - 72 - 84 + 72 + 448 + 48 + 56 lt=()- register @@ -1126,10 +1089,10 @@ fontsize=8 Relation - 168 - 672 - 96 - 84 + 120 + 448 + 64 + 56 lt=)- fontsize=8 @@ -1137,32 +1100,13 @@ access resources 20.0;20.0;20.0;50.0 - - UMLNote - - 516 - 804 - 108 - 48 - - *TODO* - -R4 Attribute ACP -transparency=0 -bg=yellow -valign=top -halign=center -layer=1 -fontsize=8 - - UMLGeneric - 1740 - 912 - 192 - 96 + 1168 + 608 + 128 + 64 Validation Manager @@ 
-1174,10 +1118,10 @@ transparency=0 Relation - 1824 - 852 - 84 - 84 + 1224 + 568 + 56 + 56 lt=()- handle @@ -1188,10 +1132,10 @@ fontsize=8 UMLGeneric - 1020 - 732 - 192 - 96 + 688 + 488 + 128 + 64 Request Manager @@ -1203,10 +1147,10 @@ transparency=0 Relation - 1020 - 660 - 108 - 96 + 688 + 440 + 72 + 64 lt=()- handle @@ -1219,10 +1163,10 @@ fontsize=8 Relation - 1752 - 660 - 108 - 96 + 1176 + 440 + 72 + 64 handle resource @@ -1234,10 +1178,10 @@ fontsize=8 Relation - 1296 - 660 - 108 - 96 + 872 + 440 + 72 + 64 lt=()- handle @@ -1249,10 +1193,10 @@ fontsize=8 Relation - 1836 - 648 - 120 - 108 + 1232 + 432 + 80 + 72 remote resource @@ -1264,10 +1208,10 @@ fontsize=8 Relation - 780 - 672 - 96 - 84 + 528 + 448 + 64 + 56 lt=)- security & @@ -1279,10 +1223,10 @@ fontsize=8 UMLNote - 72 - 804 - 168 - 24 + 56 + 536 + 112 + 16 incl. resource expirations transparency=0 @@ -1296,10 +1240,10 @@ fontsize=10 UMLPackage - 1020 - 24 - 264 - 324 + 688 + 16 + 176 + 216 Web UI transparency=0 @@ -1309,10 +1253,10 @@ layer=-2 UMLGeneric - 1056 - 84 - 192 - 96 + 712 + 56 + 128 + 64 HTTP Server REST @@ -1328,10 +1272,10 @@ drawLine(21.8,11.4,20,15) Relation - 1092 - 168 - 60 - 72 + 736 + 112 + 40 + 48 lt=- serves @@ -1341,10 +1285,10 @@ fontsize=8 Relation - 948 - 108 - 132 - 60 + 640 + 72 + 88 + 40 lt=)- 20.0;20.0;90.0;20.0 @@ -1352,10 +1296,10 @@ fontsize=8 UMLGeneric - 960 - 84 - 60 - 36 + 648 + 56 + 40 + 24 lw=0 fontsize=8 @@ -1367,10 +1311,10 @@ Mca Relation - 1176 - 168 - 96 - 72 + 792 + 112 + 64 + 48 lt=- Mca via proxy @@ -1380,10 +1324,10 @@ fontsize=8 UMLGeneric - 792 - 1284 - 168 - 36 + 536 + 856 + 112 + 24 Batch Notifications symbol=artifact @@ -1396,10 +1340,10 @@ fontsize=10 Relation - 1164 - 660 - 96 - 96 + 784 + 440 + 64 + 64 lt=)- send @@ -1411,10 +1355,10 @@ fontsize=8 Relation - 1380 - 672 - 96 - 84 + 928 + 448 + 64 + 56 lt=)- fontsize=8 @@ -1425,10 +1369,10 @@ resources UMLClass - 0 + 8 0 - 2472 - 1392 + 2016 + 928 lw=0 bg=white @@ -1439,10 +1383,10 @@ layer=-2 UMLNote - 204 - 984 - 72 - 36 + 144 + 656 + 48 + 24 rich transparency=0 @@ -1456,10 +1400,10 @@ fontsize=10 UMLGeneric - 1980 - 732 - 192 - 96 + 1328 + 488 + 128 + 64 TimeSeries Manager @@ -1471,10 +1415,10 @@ transparency=0 Relation - 1992 - 660 - 84 - 96 + 1336 + 440 + 56 + 64 lt=()- handle new @@ -1486,10 +1430,10 @@ fontsize=8 UMLGeneric - 1260 - 480 - 192 - 96 + 848 + 320 + 128 + 64 Console symbol=component @@ -1500,10 +1444,10 @@ transparency=0 Relation - 1344 - 360 - 84 - 144 + 904 + 240 + 56 + 96 lt=()- m1=handle\nuser input @@ -1513,10 +1457,10 @@ fontsize=8 Relation - 1368 - 564 - 84 - 84 + 920 + 376 + 56 + 56 lt=)- various @@ -1526,10 +1470,10 @@ fontsize=8 Relation - 2076 - 660 - 108 - 96 + 1392 + 440 + 72 + 64 lt=)- send @@ -1541,10 +1485,10 @@ fontsize=8 UMLGeneric - 540 - 480 - 192 - 96 + 368 + 320 + 128 + 64 MQTT Client REST @@ -1560,10 +1504,10 @@ drawLine(21.8,11.4,20,15) Relation - 612 - 348 - 36 - 156 + 416 + 232 + 24 + 104 lt=()- @@ -1572,10 +1516,10 @@ drawLine(21.8,11.4,20,15) Relation - 552 - 564 - 84 - 84 + 376 + 376 + 56 + 56 lt=)- fontsize=8 @@ -1586,10 +1530,10 @@ requests UMLNote - 504 - 528 - 72 - 36 + 344 + 352 + 48 + 24 Paho transparency=0 @@ -1603,10 +1547,10 @@ fontsize=10 UMLGeneric - 624 - 384 - 60 - 36 + 424 + 256 + 40 + 24 lw=0 fontsize=8 @@ -1618,10 +1562,10 @@ via MQTT Relation - 684 - 348 - 60 - 156 + 464 + 232 + 40 + 104 lt=)- 20.0;20.0;20.0;110.0 @@ -1629,10 +1573,10 @@ via MQTT UMLGeneric - 708 - 384 - 72 - 36 + 480 + 256 + 48 + 24 lw=0 fontsize=8 @@ -1644,10 +1588,10 @@ via MQTT Relation - 
528 - 348 - 60 - 156 + 360 + 232 + 40 + 104 lt=)- 20.0;20.0;20.0;110.0 @@ -1655,10 +1599,10 @@ via MQTT UMLGeneric - 552 - 384 - 60 - 36 + 376 + 256 + 40 + 24 lw=0 fontsize=8 @@ -1670,10 +1614,10 @@ via MQTT Relation - 684 - 564 - 72 - 84 + 464 + 376 + 48 + 56 lt=()- send @@ -1684,10 +1628,10 @@ fontsize=8 UMLGeneric - 1980 - 912 - 192 - 96 + 1328 + 608 + 128 + 64 Script Manager symbol=component @@ -1699,10 +1643,10 @@ customelement= Relation - 2052 - 840 - 60 - 96 + 1376 + 560 + 40 + 64 lt=()- recv @@ -1713,10 +1657,10 @@ fontsize=8 Relation - 1968 - 840 - 96 - 96 + 1320 + 560 + 64 + 64 lt=)- call service @@ -1728,10 +1672,10 @@ fontsize=8 Relation - 2100 - 840 - 60 - 96 + 1408 + 560 + 40 + 64 lt=()- run @@ -1742,10 +1686,10 @@ fontsize=8 Relation - 948 - 348 - 72 - 156 + 640 + 232 + 48 + 104 lt=()- @@ -1754,10 +1698,10 @@ fontsize=8 UMLGeneric - 996 - 384 - 72 - 36 + 672 + 256 + 48 + 24 lw=0 fontsize=8 @@ -1769,10 +1713,10 @@ Tester UMLGeneric - 2220 - 732 - 192 - 96 + 1488 + 488 + 128 + 64 Time Manager @@ -1784,24 +1728,25 @@ transparency=0 Relation - 2304 - 672 - 84 - 84 + 1544 + 448 + 48 + 56 lt=()- -handle -validations +time +related +functions fontsize=8 10.0;10.0;10.0;50.0 UMLGeneric - 1980 - 1068 - 192 - 36 + 1328 + 712 + 128 + 24 Scripts symbol=artifact @@ -1818,10 +1763,10 @@ drawLine(21.8,11.4,20,15) lw=1 Relation - 2064 - 996 - 36 - 96 + 1384 + 664 + 24 + 64 lt=- fontsize=8 @@ -1831,10 +1776,10 @@ fontsize=8 UMLGeneric - 2076 - 1044 - 24 - 24 + 1392 + 696 + 16 + 16 lw=0 fontsize=12 @@ -1844,10 +1789,10 @@ fontsize=12 UMLGeneric - 1500 - 480 - 192 - 96 + 1168 + 320 + 128 + 64 Upper Tester symbol=component @@ -1858,10 +1803,10 @@ transparency=0 Relation - 1584 - 360 - 96 - 144 + 1224 + 240 + 64 + 96 lt=()- m1=Upper Tester\nrequests @@ -1871,10 +1816,10 @@ fontsize=8 Relation - 1524 - 564 - 84 - 84 + 1184 + 376 + 56 + 56 lt=)- requests @@ -1884,10 +1829,10 @@ fontsize=8 Relation - 1620 - 564 - 72 - 84 + 1248 + 376 + 48 + 56 lt=)- scripts @@ -1897,10 +1842,10 @@ fontsize=8 Relation - 2304 - 852 - 72 - 84 + 1704 + 448 + 48 + 56 lt=()- handle @@ -1912,10 +1857,10 @@ fontsize=8 UMLGeneric - 2220 - 912 - 192 - 96 + 1648 + 488 + 128 + 64 Semantic Manager @@ -1924,4 +1869,306 @@ valign=center transparency=0 + + UMLGeneric + + 1008 + 320 + 128 + 64 + + Text UI +symbol=component +valign=center +transparency=0 + + + + Relation + + 1096 + 376 + 48 + 56 + + lt=()- +logging +events +fontsize=8 + 10.0;50.0;10.0;10.0 + + + Relation + + 1024 + 376 + 64 + 56 + + lt=)- +requests +resources +scripts +fontsize=8 + 20.0;40.0;20.0;10.0 + + + Relation + + 1064 + 240 + 56 + 96 + + lt=()- +m1=handle\nuser input +fontsize=8 + 10.0;10.0;10.0;100.0 + + + UMLGeneric + + 656 + 728 + 112 + 24 + + Actions +symbol=artifact +halign=center +layer=1 +transparency=0 +fontsize=10 + + + + UMLGeneric + + 656 + 792 + 112 + 24 + + Schedules +symbol=artifact +halign=center +layer=1 +transparency=0 +fontsize=10 + + + + UMLGeneric + + 656 + 760 + 112 + 24 + + Requests +symbol=artifact +halign=center +layer=1 +transparency=0 +fontsize=10 + + + + UMLGeneric + + 360 + 704 + 144 + 184 + + Imports +transparency=0 + + + + UMLGeneric + + 368 + 728 + 128 + 24 + + Attribute Policies +symbol=artifact +halign=center +layer=1 +transparency=0 +fontsize=10 + + + + UMLGeneric + + 368 + 792 + 128 + 24 + + Attribute Policies +symbol=artifact +halign=center +layer=1 +transparency=0 +fontsize=10 + + + + UMLGeneric + + 1488 + 608 + 128 + 64 + + Onboarding +symbol=component +valign=center +transparency=0 + + + + Relation + + 1544 + 568 + 56 + 
56 + + lt=()- +handle +onboarding +fontsize=8 + 10.0;10.0;10.0;50.0 + + + UMLGeneric + + 1648 + 608 + 128 + 64 + + Location +Manager +symbol=component +valign=center +transparency=0 + + + + Relation + + 1704 + 568 + 48 + 56 + + lt=()- +handle +location +updates +fontsize=8 + 10.0;10.0;10.0;50.0 + + + UMLGeneric + + 1800 + 488 + 128 + 64 + + Action +Manager +symbol=component +valign=center +transparency=0 + + + + Relation + + 1880 + 448 + 48 + 56 + + lt=()- +handle +action +updates +fontsize=8 + 10.0;10.0;10.0;50.0 + + + Relation + + 1808 + 440 + 64 + 64 + + lt=)- +call service +functions +fontsize=8 + + 20.0;20.0;20.0;60.0 + + + UMLNote + + 984 + 352 + 64 + 24 + + Textualize +transparency=0 +bg=light_gray +valign=center +halign=center +layer=1 +fontsize=10 + + + + UMLNote + + 1624 + 520 + 48 + 24 + + rdflib +transparency=0 +bg=light_gray +valign=center +halign=center +layer=1 +fontsize=10 + + + + UMLNote + + 824 + 352 + 48 + 24 + + rich +transparency=0 +bg=light_gray +valign=center +halign=center +layer=1 +fontsize=10 + + diff --git a/docs/Configuration.md b/docs/Configuration.md index 87f09936..a9a4974b 100644 --- a/docs/Configuration.md +++ b/docs/Configuration.md @@ -70,12 +70,15 @@ The following tables provide detailed descriptions of all the possible CSE confi [[http] - HTTP Server Settings](#http) [[http.security] - HTTP Security Settings](#security_http) [[http.cors] - HTTP CORS (Cross-Origin Resource Sharing) Settings](#http_cors) +[[http.wsgi] - HTTP WSGI (Web Server Gateway Interface) Settings](#http_wsgi) [[logging] - Logging Settings](#logging) [[mqtt] - MQTT Client Settings](#client_mqtt) [[mqtt.security] - MQTT Security Settings](#security_mqtt) [[resource.acp] - Resource defaults: Access Control Policies](#resource_acp) [[resource.actr] - Resource defaults: Action](#resource_actr) [[resource.cnt] - Resource Defaults: Container](#resource_cnt) +[[resource.grp] - Resource Defaults: Group](#resource_grp) +[[resource.lcp] - Resource Defaults: LocationPolicy](#resource_lcp) [[resource.req] - Resource Defaults: Request](#resource_req) [[resource.sub] - Resource Defaults: Subscription](#resource_sub) [[resource.ts] - Resource Defaults: TimeSeries](#resource_ts) @@ -164,7 +167,7 @@ The following tables provide detailed descriptions of all the possible CSE confi | Setting | Description | Configuration Name | |:--------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------| | port | Port to listen to.
Default: 8080 | http.port | -| listenIF | Interface to listen to. Use 0.0.0.0 for "all" interfaces.
Default:127.0.0.1 | http.listenIF | +| listenIF | Interface to listen to. Use 0.0.0.0 for "all" interfaces.
Default:0.0.0.0 | http.listenIF | | address | Own address. Should be a local/public reachable address.
Default: http://127.0.0.1:8080 | http.address | | root | CSE Server root. Never provide a trailing /.
Default: empty string | http.root | | enableRemoteConfiguration | Enable an endpoint for get and set certain configuration values via a REST interface.
**ATTENTION: Enabling this feature exposes configuration values, IDs and passwords, and is a security risk.**
Default: false | http.enableRemoteConfiguration | @@ -188,6 +191,10 @@ The following tables provide detailed descriptions of all the possible CSE confi | verifyCertificate | Verify certificates in requests. Set to *False* when using self-signed certificates.
Default: False | http.security.verifyCertificate | | caCertificateFile | Path and filename of the certificate file.
Default: None | http.security.caCertificateFile | | caPrivateKeyFile | Path and filename of the private key file.
Default: None | http.security.caPrivateKeyFile | +| enableBasicAuth | Enable basic authentication for the HTTP binding.
Default: false | http.security.enableBasicAuth | +| enableTokenAuth | Enable token authentication for the HTTP binding.
Default: false | http.security.enableTokenAuth | +| basicAuthFile | Path and filename of the http basic authentication file. The file must contain lines with the format "username:password". Comments are lines starting with a #.
Default: certs/http_basic_auth.txt | http.security.basicAuthFile | +| tokenAuthFile | Path and filename of the http bearer token authentication file. The file must contain lines with the format "token". Comments are lines starting with a #.
Default: certs/http_token_auth.txt | http.security.tokenAuthFile | [top](#sections) @@ -205,7 +212,21 @@ The following tables provide detailed descriptions of all the possible CSE confi [top](#sections) --- + +### [http.wsgi] - HTTP WSGI (Web Server Gateway Interface) Settings + +| Setting | Description | Configuration Name | +|:----------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------| +| enable | Enable WSGI support for the HTTP binding.
Default: false | http.wsgi.enable | +| threadPoolSize | The number of threads used to process requests. This number should be of similar size as the *connectionLimit* setting.
Default: 100 | http.wsgi.threadPoolSize | +| connectionLimit | The number of possible parallel connections that can be accepted by the WSGI server. Note: One connection uses one system file descriptor.
Default: 100 | http.wsgi.connectionLimit | + + + +[top](#sections) + +--- ### [mqtt] - MQTT Client Settings @@ -213,9 +234,9 @@ The following tables provide detailed descriptions of all the possible CSE confi | Setting | Description | Configuration Name | |:------------|:------------------------------------------------------------------------------------------|:-------------------| | enable | Enable the MQTT binding.
Default: False | mqtt.enable | -| address | he hostname of the MQTT broker.
Default; 127.0.0.1 | mqtt.address | +| address | The hostname of the MQTT broker.
Default: 127.0.0.1 | mqtt.address |
 | port | Set the port for the MQTT broker.
Default: 1883, or 8883 for TLS | mqtt.port | -| listenIF | Interface to listen to. Use 0.0.0.0 for "all" interfaces.
Default:127.0.0.1 | mqtt.listenIF | +| listenIF | Interface to listen to. Use 0.0.0.0 for "all" interfaces.
Default:0.0.0.0 | mqtt.listenIF | | keepalive | Value for the MQTT connection's keep-alive parameter in seconds.
Default: 60 seconds | mqtt.keepalive | | topicPrefix | Optional prefix for topics.
Default: empty string | mqtt.topicPrefix | | timeout | Timeout when sending MQTT requests and waiting for responses.
Default: 10.0 seconds | mqtt.timeout | @@ -269,10 +290,11 @@ The following tables provide detailed descriptions of all the possible CSE confi | level | Loglevel. Allowed values: debug, info, warning, error, off.
See also command line argument [–log-level](Running.md).
Default: debug | logging.level | | count | Number of files for log rotation.
Default: 10 | logging.count | | size | Size per log file.
Default: 100.000 bytes | logging.size | +| maxLogMessageLength | Maximum length of a log message. Longer messages will be truncated. A value of 0 means no truncation.
Default: 1000 characters | logging.maxLogMessageLength | | stackTraceOnError | Print a stack trace when logging an 'error' level message.
Default: True | logging.stackTraceOnError | | enableBindingsLogging | Enable logging of low-level HTTP & MQTT client events.
Default: False | logging.enableBindingsLogging | | queueSize | Number of log entries that can be added to the asynchronous queue before blocking. A queue size of 0 means disabling the queue.
Default: 5000 entries | logging.queueSize |
-| filter | List of component names to exclude from logging.
Default: werkzeug,markdown_it | logging.filter | +| filter | List of component names to exclude from logging.
Default: werkzeug,markdown_it | logging.filter | [top](#sections) @@ -376,6 +398,31 @@ The following tables provide detailed descriptions of all the possible CSE confi --- + + +### [resource.grp] - Resource Defaults: Group + +| Setting | Description | Configuration Name | +|:---------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------| +| resultExpirationTime | Set the time for aggregating the results of a group request before interrupting. The format is the time in ms. A value of 0 ms means no timeout.
Default: 0 ms | resource.grp.resultExpirationTime |
+
+[top](#sections)
+
+---
+
+
+
+### [resource.lcp] - Resource Defaults: LocationPolicy
+
+| Setting | Description | Configuration Name |
+|:--------|:--------------------------------------------------------------------------------------|:-------------------|
+| mni | Default for maxNrOfInstances for the LocationPolicy's container.
Default: 10 | resource.lcp.mni | +| mbs | Default for maxByteSize for the LocationPolicy's container.
Default: 10.000 bytes | resource.lcp.mbs | + +[top](#sections) + +--- + ### [resource.req] - Resource Defaults: Request @@ -470,6 +517,7 @@ The following tables provide detailed descriptions of all the possible CSE confi | scriptDirectories | Add one or multiple directory paths to look for scripts, in addition to the ones in the "init" directory. Must be a comma-separated list.
Default: not set | scripting.scriptDirectories | | verbose | Enable debug output during script execution, such as the current executed line.
Default: False | scripting.verbose | | fileMonitoringInterval | Set the interval to check for new files in the script (init) directory.
0 means disable monitoring. Must be >= 0.0.
Default: 2.0 seconds | scripting.fileMonitoringInterval | +| maxRuntime | Set the timeout for script execution in seconds. 0.0 seconds means no timeout.
Must be >= 0.0.
Default: 60.0 seconds | scripting.maxRuntime | [top](#sections) diff --git a/docs/Contributing.md b/docs/Contributing.md index 8bddb0f4..a2ef16ab 100644 --- a/docs/Contributing.md +++ b/docs/Contributing.md @@ -27,6 +27,6 @@ Thank you for contributed ideas, code, patches, testing, bug fixes, time, and mo [Massimo Vanetti](https://github.com/massimov) [Tyler Sengia](https://www.linkedin.com/in/tyler-sengia/) ![JiriD85](https://github.com/JiriD85.png?size=24) [JiriD85](https://github.com/JiriD85) - +![samuelbles07](https://github.com/samuelbles07.png?size=24) [samuelbles07](https://github.com/samuelbles07) [← README](../README.md) diff --git a/docs/FAQ.md b/docs/FAQ.md index 8e5691db..ec2ae93f 100644 --- a/docs/FAQ.md +++ b/docs/FAQ.md @@ -98,24 +98,37 @@ writeDelay=10 ``` + ## Web UI 1. **Can I use the web UI also with other CSE implementations?** The web UI can also be run as an independent application. Since it communicates with the CSE via the Mca interface it should be possible to use it with other CSE implementations as well as long as those third party CSEs follow the oneM2M http binding specification. It only supports the resource types that the ACME CSE supports, but at least it will present all other resource types as *unknown*. -## Console +## Console and Text UI 1. **Some of the tables, text graphics etc are not aligned or correctly displayed in the console** Some mono-spaced fonts don't work well with UTF-8 character sets and graphic elements. Especially the MS Windows *cmd.exe* console seems to have problems. Try one of the more extended fonts like *JuliaMono* or *DejaVu Sans Mono*. +1. **There is an error message "UnicodeEncodeError: 'latin-1' codec can't encode character"** + This error message is shown when the console tries to display a character that is not supported by the current console encoding. Try to set the console encoding to UTF-8 by setting the environment variable *PYTHONIOENCODING* to *utf-8*, for example: + ```bash + export PYTHONIOENCODING=utf-8 + ``` + ## Operating Systems ### RaspberryPi -1. **Restrictions** +1. **Restrictions on 32 bit Systems** Currently, the normally installed Raspbian OS is a 32 bit system. This means that several restrictions apply here, such as the maximum date supported (~2038). It needs to be determined whether these restrictions still apply when the 64 bit version of Raspbian is available. +1. **The console or the text UI is not displayed correctly** + It could be that the OS's terminal applications doesn't support rendering of extra characters, like line graphics. One recommendation on Linux systems is to install the [Mate Terminal](https://wiki.mate-desktop.org/mate-desktop/applications/mate-terminal/), which supports UTF-8 and line graphics. It also renders the output much faster. + + ```bash + sudo apt-get install mate-terminal + ``` 1. **Timing Issues** Also, the resolution of the available Python timers is rather low on Raspbian, and background tasks might not run exactly on the desired time. Unfortunately, this is also why sometimes a couple of the CSE's tests cases may fail randomly. 
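As a short, illustrative sketch for the two HTTP authentication files introduced above (the *basicAuthFile* and *tokenAuthFile* settings in the [http.security] table of Configuration.md), and assuming only the line formats documented there — "username:password" entries, one bearer token per line, and "#" starting a comment — the credential files could be created like this. All user names, passwords, tokens and the directory path below are placeholders, not values from the repository.

```bash
# Illustrative only: create the credential files referenced by
# http.security.basicAuthFile and http.security.tokenAuthFile.
# CERTDIR is an assumed location; point it at the directory your
# configuration actually uses (the documented default file names are
# certs/http_basic_auth.txt and certs/http_token_auth.txt).
CERTDIR=./certs
mkdir -p "$CERTDIR"

cat > "$CERTDIR/http_basic_auth.txt" <<'EOF'
# one "username:password" entry per line; lines starting with '#' are comments
alice:change-me
EOF

cat > "$CERTDIR/http_token_auth.txt" <<'EOF'
# one bearer token per line; lines starting with '#' are comments
0123456789abcdef-placeholder-token
EOF
```

With the files in place, basic or token authentication is switched on via the *enableBasicAuth* and *enableTokenAuth* settings described in the same [http.security] table.
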
diff --git a/docs/Importing.md b/docs/Importing.md index 0afa69c1..95531663 100644 --- a/docs/Importing.md +++ b/docs/Importing.md @@ -2,14 +2,14 @@ # CSE Startup, Importing Resources and Other Settings -[Resources](#resources) +[Initial Resources](#resources) [Attribute and Hierarchy Policies for FlexContainer Specializations](#flexcontainers) [Attribute Policies for Common Resources and Complex Types](#attributes) [Help Documentation](#help-documentation) -## Resources +## Initial Resources During CSE startup and restart it is necessary to import a first set of resources to the CSE. This is done automatically by the CSE by running a script that has the [@init](ACMEScript-metatags.md#meta_init) meta tag set. By default this is the [init.as](../init/init.as) script from the [init](../init) directory. @@ -273,12 +273,15 @@ The format for enumeration data type definitions is a bit simpler: // The attributePolicy.ep file contains a dictionary of enumeration data types { - // Each enumeration definition is identified by its name + // Each enumeration definition is identified by its name. It is a dictionary. "enumerationType": { - // Each definition can only contain a the following attribute (definition see above) + // A single enumeration definition is key value pair. The key is the enumeration + // value, the value is the interpretation of that value. + "" : "" - "evalues" : ... + // This defines a range of values. Each one gets the same interpretation assigned. + ".." : "" } } ``` diff --git a/docs/Installation.md b/docs/Installation.md index 405d4a6a..a87c8279 100644 --- a/docs/Installation.md +++ b/docs/Installation.md @@ -4,7 +4,7 @@ ### Python -ACME requires **Python 3.8** or newer. Install it with your favorite package manager. +ACME requires **Python 3.10** or newer. Install it with your favorite package manager. You may consider to use a virtual environment manager like pyenv + virtualenv (see, for example, [this tutorial](https://realpython.com/python-virtual-environments-a-primer/)). @@ -25,7 +25,13 @@ You may consider to use a virtual environment manager like pyenv + virtualenv (s python3 -m pip install cbor2 flask flask-cors InquirerPy isodate paho-mqtt plotext rdflib requests rich tinydb 1. Run the CSE for the first time. -If no configuration file is found then an interactive configuration process is started. The +You can start the CSE by simply running it from the command line: + + python3 -m acme + + Please refer to the [Running](Running.md) documentation for more detailed instructions how to start and run the ACME CSE. + + If no configuration file is found then an interactive configuration process is started. The configuration is saved to a configuration file. e.g. *acme.ini* by default.   ![](images/bootstrapConfig.gif) @@ -35,15 +41,6 @@ configuration is saved to a configuration file. e.g. *acme.ini* by default. See the [Configuration](docs/Configuration.md) documentation for further details, and the defaults configuration file [acme.ini.default](../acme.ini.default). - -## Running the CSE - -You can start the CSE by simply running it from the command line: - - python3 -m acme - -Please refer to the [Running](Running.md) documentation for more detailed instructions how to start and run the ACME CSE. - --- ## Certificates and Support for https @@ -77,7 +74,9 @@ The following third-party components are used by the ACME CSE. - [rdflib](https://github.com/RDFLib/rdflib) is a Python library for working with RDF. BSD 3-Clause License. 
- The CSE uses the [Requests](https://requests.readthedocs.io) HTTP Library to send requests vi http. Apache2 License - The CSE uses the [Rich](https://github.com/willmcgugan/rich) text formatter library to format various terminal output. MIT License +- [shapely](https://github.com/shapely/shapely) is a library for manipulation and analysis of geometric objects. BSD 3-Clause License - [Textual](https://github.com/textualize/textual) is a Rapid Application Development framework for to build textual user interfaces in Python. MIT License +- [waitress](https://github.com/Pylons/waitress) is a production-quality pure-Python WSGI server with very acceptable performance. ZPL 2.1 License - To store resources the CSE uses the lightweight [TinyDB](https://github.com/msiemens/tinydb) document database. MIT License diff --git a/docs/Running.md b/docs/Running.md index a2ba7b21..e7518fb1 100644 --- a/docs/Running.md +++ b/docs/Running.md @@ -18,23 +18,24 @@ configuration process](Installation.md#first_setup) is started if the configurat In additions, you can provide additional command line arguments that will override the respective settings from the configuration file: -| Command Line Argument | Description | -|:--------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------| -| -h, --help | Show a help message and exit. | -| --http, --https | Run the CSE with http or https server.
This overrides the [useTLS](Configuration.md#security) configuration setting. | -| --config <filename> | Specify a configuration file that is used instead of the default (*acme.ini*) one. | -| --db-reset | Reset and clear the database when starting the CSE. | -| --db-storage {memory,disk} | Specify the DB\'s storage mode.
This overrides the [inMemory](Configuration.md#database) configuration setting. | -| --headless | Operate the CSE in headless mode. This disables almost all screen output and also the build-in console interface. | -| --http-address <server URL> | Specify the CSE\'s http server URL.
This overrides the [address](Configuration.md#http_server) configuration setting. | -| --http-port <http port> | Specify the CSE\'s http server port.
This overrides the [address](Configuration.md#http_port) configuration setting. | -| --import-directory <directory> | Specify the import directory.
This overrides the [resourcesPath](Configuration.md#general) configuration setting. | -| --network-interface <ip address | Specify the network interface/IP address to bind to.
This overrides the [listenIF](Configuration.md#server_http) configuration setting. | -| --log-level {info, error, warn, debug, off} | Set the log level, or turn logging off.
This overrides the [level](Configuration.md#logging) configuration setting. | -| --mqtt, --no-mqtt | Enable or disable the MQTT binding.
This overrides MQTT's [enable](Configuration.md#client_mqtt) configuration setting. | -| --remote-cse, --no-remote-cse | Enable or disable remote CSE connections and checking.
This overrides the [enableRemoteCSE](Configuration.md#general) configuration setting. | -| --statistics, --no-statistics | Enable or disable collecting CSE statistics.
This overrides the [enable](Configuration.md#statistics) configuration setting. |
-| --textui | Run the CSE's text UI after startup. |
+| Command Line Argument | Description |
+|:--------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------|
+| -h, --help | Show a help message and exit. |
+| --config <filename> | Specify a configuration file that is used instead of the default (*acme.ini*) one. |
+| --db-reset | Reset and clear the database when starting the CSE. |
+| --db-storage {memory,disk} | Specify the DB's storage mode.
This overrides the [inMemory](Configuration.md#database) configuration setting. |
+| --headless | Operate the CSE in headless mode. This disables almost all screen output and also the built-in console interface. |
+| --http, --https | Run the CSE with http or https server.
This overrides the [useTLS](Configuration.md#security) configuration setting. |
+| --http-wsgi | Run the CSE with http WSGI support.
This overrides the [enable](Configuration.md#http_wsgi) configuration setting. |
+| --http-address <server URL> | Specify the CSE's http server URL.
This overrides the [address](Configuration.md#http_server) configuration setting. |
+| --http-port <http port> | Specify the CSE's http server port.
This overrides the [port](Configuration.md#http_port) configuration setting. |
+| --import-directory <directory> | Specify the import directory.
This overrides the [resourcesPath](Configuration.md#general) configuration setting. |
+| --network-interface <ip address> | Specify the network interface/IP address to bind to.
This overrides the [listenIF](Configuration.md#server_http) configuration setting. | +| --log-level {info, error, warn, debug, off} | Set the log level, or turn logging off.
This overrides the [level](Configuration.md#logging) configuration setting. | +| --mqtt, --no-mqtt | Enable or disable the MQTT binding.
This overrides MQTT's [enable](Configuration.md#client_mqtt) configuration setting. | +| --remote-cse, --no-remote-cse | Enable or disable remote CSE connections and checking.
This overrides the [enableRemoteCSE](Configuration.md#general) configuration setting. | +| --statistics, --no-statistics | Enable or disable collecting CSE statistics.
This overrides the [enable](Configuration.md#statistics) configuration setting. | +| --textui | Run the CSE's text UI after startup. | diff --git a/docs/Supported.md b/docs/Supported.md index 089e8ab6..4cf08571 100644 --- a/docs/Supported.md +++ b/docs/Supported.md @@ -41,13 +41,15 @@ The ACME CSE supports the following oneM2M resource types: | FlexContainer & Specializations | ✓ | Any specialization is supported and validated. See [Importing Attribute Policies](Importing.md#attributes) for further details.
Supported specializations include: TS-0023 R4, GenericInterworking, AllJoyn. | | FlexContainerInstance | ✓ | Experimental. This is an implementation of the draft FlexContainerInstance specification. | | Group (GRP) | ✓ | The support includes requests via the *fopt* (fanOutPoint) virtual resource. Groups may contain remote resources. | +| LocationPolicy (LCP) | ✓ | Only *device based* location policy is supported. The LCP's *cnt* stores geo-coordinates and geo-fencing results. | | Management Objects | ✓ | See also the list of supported [management objects](#mgmtobjs). | | Node (NOD) | ✓ | | | Polling Channel (PCH) | ✓ | Support for Request and Notification long-polling via the *pcu* (pollingChannelURI) virtual resource. *requestAggregation* functionality is supported, too. | | Remote CSE (CSR) | ✓ | Announced resources are supported. Transit request to resources on registered CSE's are supported. | | Request (REQ) | ✓ | Support for non-blocking requests. | +| Schedule (SCH) | ✓ | Support for CSE communication, nodes, subscriptions and crossResourceSubscriptions. | | SemanticDescriptor (SMD) | ✓ | Support for basic resource handling and semantic queries. | -| Subscription (SUB) | ✓ | Notifications via http(s) (direct url or an AE's Point-of-Access (POA)). BatchNotifications, attributes. Not all features are supported yet. | +| Subscription (SUB) | ✓ | Notifications via http(s) (direct url or an AE's Point-of-Access (POA)). BatchNotifications, attributes, statistics. Not all features are supported yet. | | TimeSeries (TS) | ✓ | Including missing data notifications. | | TimeSeriesInstance (TSI) | ✓ | *dataGenerationTime* attribute only supports absolute timestamps. | | TimeSyncBeacon (TSB) | ✓ | Experimental. Implemented functionality might change according to specification changes. | @@ -76,41 +78,46 @@ The following table presents the supported management object specifications. ## oneM2M Service Features -| Functionality | Supported | Remark | -|:------------------------------|:---------:|:------------------------------------------------------------------------------------------| -| AE registration | ✓ | | -| Blocking requests | ✓ | | -| Delayed request execution | ✓ | Through the *Operation Execution Timestamp* request attribute. | -| Discovery | ✓ | | -| Long polling | ✓ | Long polling for request unreachable AEs and CSEs through <pollingChannel>. | -| Non-blocking requests | ✓ | Non-blocking synchronous and asynchronous, and flex-blocking, incl. *Result Persistence*. | -| Notifications | ✓ | E.g. for subscriptions and non-blocking requests. | -| Partial Retrieve | ✓ | Support for partial retrieve of individual resource attributes. | -| Remote CSE registration | ✓ | | -| Request expiration | ✓ | Through the *Request Expiration Timestamp* request attribute | -| Request forwarding | ✓ | Forwarding requests from one CSE to another. | -| Request parameter validations | ✓ | | -| Resource addressing | ✓ | *CSE-Relative*, *SP-Relative* and *Absolute* as well as hybrid addressing are supported. | -| Resource announcements | ✓ | Under the CSEBaseAnnc resource (R4 feature). Bi-directional update sync. | -| Resource expiration | ✓ | | -| Resource validations | ✓ | | -| Semantics | ✓ | Basic support for semantic descriptors and semantic queries and discovery. | -| Standard oneM2M requests | ✓ | CREATE, RETRIEVE, UPDATE, DELETE, NOTIFY | -| Subscriptions | ✓ | Incl. batch notification, and resource type and attribute filtering. | -| Time Synchronization | ✓ | | -| TimeSeries data handling | ✓ | Incl. 
missing data detection, monitoring and notifications. | +| Functionality | Supported | Remark | +|:------------------------------|:---------:|:-------------------------------------------------------------------------------------------------------------------------------------------| +| AE registration | ✓ | | +| Blocking requests | ✓ | | +| Delayed request execution | ✓ | Through the *Operation Execution Timestamp* request attribute. | +| Discovery | ✓ | | +| Geo-query | ✓ | | +| Location | ✓ | Only *device based, and no *network based* location policies are supported. | +| Long polling | ✓ | Long polling for request unreachable AEs and CSEs through <pollingChannel>. | +| Non-blocking requests | ✓ | Non-blocking synchronous and asynchronous, and flex-blocking, incl. *Result Persistence*. | +| Notifications | ✓ | E.g. for subscriptions and non-blocking requests. | +| Partial Retrieve | ✓ | Support for partial retrieve of individual resource attributes. | +| Remote CSE registration | ✓ | | +| Request expiration | ✓ | The *Request Expiration Timestamp* request attribute | +| Request forwarding | ✓ | Forwarding requests from one CSE to another. | +| Request parameter validations | ✓ | | +| Resource addressing | ✓ | *CSE-Relative*, *SP-Relative* and *Absolute* as well as hybrid addressing are supported. | +| Resource announcements | ✓ | Under the CSEBaseAnnc resource (R4 feature). Bi-directional update sync. | +| Resource expiration | ✓ | | +| Resource validations | ✓ | | +| Result expiration | ✓ | The *Result Expiration Timestamp* request attribute. Result timeouts for non-blocking requests depend on the resource expiration interval. | +| Semantics | ✓ | Basic support for semantic descriptors and semantic queries and discovery. | +| Standard oneM2M requests | ✓ | CREATE, RETRIEVE, UPDATE, DELETE, NOTIFY | +| Subscriptions | ✓ | Incl. batch notification, and resource type and attribute filtering. | +| Time Synchronization | ✓ | | +| TimeSeries data handling | ✓ | Incl. missing data detection, monitoring and notifications. | ### Additional CSE Features -| Functionality | Remark | -|:----------------------|:----------------------------------------------------------------------------------------------------------| -| HTTP CORS | Support for *Cross-Origin Resource Sharing* to support http(s) redirects. | -| Text Console | Control and manage the CSE, inspect resources, run scripts in a text console. | -| Test UI | Text-based UI to inspect resources and requests, configurations, stats, and more | -| Testing: Upper Tester | Basic support for the Upper Tester protocol defined in TS-0019, and additional command execution support. | -| Request Recording | Record requests to and from the CSE to learn and debug requests over Mca and Mcc. | -| Script Interpreter | Lisp-based scripting support to extent functionalities, implement simple AEs, prototypes, test, ... | -| Web UI | | +| Functionality | Remark | +|:----------------------|:-----------------------------------------------------------------------------------------------------------------------------| +| HTTP CORS | Support for *Cross-Origin Resource Sharing* to support http(s) redirects. | +| HTTP Authorization | Basic support for *basic* and *bearer* (token) authorization. | +| HTTP WSGI | Support for the Python *Web Server Gateway Interface* to improve integration with a reverse proxy or API gateway, ie. Nginx. | +| Text Console | Control and manage the CSE, inspect resources, run scripts in a text console. 
|
+| Text UI | Text-based UI to inspect resources and requests, configurations, stats, and more |
+| Testing: Upper Tester | Basic support for the Upper Tester protocol defined in TS-0019, and additional command execution support. |
+| Request Recording | Record requests to and from the CSE to learn and debug requests over Mca and Mcc. |
+| Script Interpreter | Lisp-based scripting support to extend functionalities, implement simple AEs, prototypes, test, ... |
+| Web UI | |
 
 
 ### Experimental CSE Features
@@ -162,12 +169,12 @@ The following result contents are implemented for standard oneM2M requests & dis
 
 ## Protocols Bindings
 
 The following Protocol Bindings are supported:
 
-| Protocol Binding | Supported | Remark |
-|:-----------------|:---------:|:--------------------------------------------------------------------------------------------------------|
-| http | ✓ | incl. TLS (https) and CORS support.
Experimental: Using PATCH to replace missing DELETE in http/1.0 | -| coap | ✗ | | -| mqtt | ✓ | incl. mqtts | -| WebSocket | ✗ | | +| Protocol Binding | Supported | Remark | +|:-----------------|:---------:|:----------------------------------------------------------------------------------------------------------------------------------------------| +| http | ✓ | incl. TLS (https) and CORS support. *basic* and *bearer* authentication.
Experimental: Using PATCH to replace missing DELETE in http/1.0 | +| coap | ✗ | | +| mqtt | ✓ | incl. mqtts | +| WebSocket | ✗ | | The supported bindings can be used together, and combined and mixed in any way. diff --git a/docs/images/cse_uml.png b/docs/images/cse_uml.png index d4ae6719..c9a0f047 100644 Binary files a/docs/images/cse_uml.png and b/docs/images/cse_uml.png differ diff --git a/docs/images/db_schemas.png b/docs/images/db_schemas.png index 1bee635f..eee5b866 100644 Binary files a/docs/images/db_schemas.png and b/docs/images/db_schemas.png differ diff --git a/docs/images/db_schemas.uxf b/docs/images/db_schemas.uxf index dfe43bb6..03db9da1 100644 --- a/docs/images/db_schemas.uxf +++ b/docs/images/db_schemas.uxf @@ -28,9 +28,9 @@ fontfamily=Monospaced UMLClass - 208 - 120 - 408 + 336 + 248 + 416 80 resources @@ -47,9 +47,9 @@ layer=1 UMLClass - 208 - 272 - 408 + 336 + 400 + 416 104 identifiers @@ -68,9 +68,9 @@ layer=1 UMLClass - 208 - 392 - 408 + 336 + 520 + 416 80 srn @@ -87,9 +87,9 @@ layer=1 UMLClass - 208 - 488 - 408 + 336 + 616 + 416 88 children @@ -107,9 +107,9 @@ layer=1 UMLPackage - 192 - 240 - 440 + 320 + 368 + 448 352 identifiers-<csi>.json @@ -120,10 +120,10 @@ bg=#dddddd UMLClass - 208 - 640 - 408 - 232 + 336 + 768 + 416 + 248 subscriptions -- @@ -138,6 +138,7 @@ PK ri : string // subscription resourceID nus : list of string // notification URLs bn : batchNotify struct // batch notification cr : string // creator + nec : integer // notification event category org : string // originator ma : timestamp // maxAge nse : boolean // notificationStats enabled @@ -151,10 +152,10 @@ layer=1 UMLPackage - 192 - 608 - 440 - 280 + 320 + 736 + 448 + 296 subscriptions-<csi>.json fg=gray @@ -164,9 +165,9 @@ bg=#dddddd UMLClass - 672 - 464 - 408 + 816 + 592 + 416 104 batchNotifications @@ -185,9 +186,9 @@ layer=1 UMLClass - 712 - 960 - 408 + 1368 + 792 + 448 64 halign=left @@ -198,9 +199,9 @@ Why extra structs? 
Cheaper than full resources UMLPackage - 656 - 432 - 440 + 800 + 560 + 448 152 batchNotifications-<csi>.json @@ -211,9 +212,9 @@ bg=#dddddd UMLPackage - 656 - 88 - 440 + 800 + 216 + 448 112 statistics-<csi>.json @@ -224,9 +225,9 @@ bg=#dddddd UMLClass - 672 - 120 - 408 + 816 + 248 + 416 64 statistics @@ -242,9 +243,9 @@ layer=1 UMLClass - 672 - 248 - 408 + 816 + 376 + 416 152 actions @@ -267,9 +268,9 @@ layer=1 UMLPackage - 656 - 216 - 440 + 800 + 344 + 448 200 actions-<csi>.json @@ -280,10 +281,10 @@ bg=#dddddd UMLClass - 672 - 632 - 408 - 168 + 816 + 760 + 416 + 184 requests -- @@ -294,6 +295,7 @@ PK ts : float // UTC timestamp op : Operation // request operation rsc : status code // Response Status Code out : boolean // CSE initiated request + ot : string // operation timestamp req : JSON // request JSON rsp : JSON // response JSON -- @@ -306,10 +308,10 @@ layer=1 UMLPackage - 656 - 600 - 440 - 216 + 800 + 728 + 448 + 232 requests -<csi>.json fg=gray @@ -319,13 +321,46 @@ bg=#dddddd UMLPackage - 192 - 88 - 440 + 320 + 216 + 448 136 resources-<csi>.json fg=gray +bg=#dddddd + + + + UMLClass + + 816 + 1008 + 416 + 96 + + schedules +-- +PK ri : string // schedule's resourceID + pi : string // parent resource resourceID + sce : list of string // list of schedule timestamps +-- + +bg=#ffffff +transparency=0 +layer=1 + + + + UMLPackage + + 800 + 976 + 448 + 144 + + schedules-<csi>.json +fg=gray bg=#dddddd diff --git a/docs/images/resources_uml.png b/docs/images/resources_uml.png index ac2a7be6..4a46aa03 100644 Binary files a/docs/images/resources_uml.png and b/docs/images/resources_uml.png differ diff --git a/docs/images/resources_uml.uxf b/docs/images/resources_uml.uxf index 361030c4..6896e939 100644 --- a/docs/images/resources_uml.uxf +++ b/docs/images/resources_uml.uxf @@ -1,13 +1,13 @@ - 9 + 15 UMLClass - 585 + 1215 0 - 81 - 27 + 135 + 45 /Resource/ lt=.. @@ -16,10 +16,10 @@ lt=.. UMLClass - 360 - 72 - 36 - 27 + 840 + 120 + 60 + 45 ACP @@ -27,10 +27,10 @@ lt=.. Relation - 369 - 18 - 279 - 72 + 855 + 30 + 465 + 120 lt=<<- 290.0;10.0;290.0;40.0;10.0;40.0;10.0;60.0 @@ -38,10 +38,10 @@ lt=.. UMLClass - 990 - 72 - 135 - 27 + 1890 + 120 + 225 + 45 /AnnouncedResource/ lt=.. @@ -50,10 +50,10 @@ lt=.. UMLClass - 81 - 522 - 63 - 27 + 135 + 870 + 105 + 45 ACPAnnc @@ -61,21 +61,21 @@ lt=.. Relation - 108 - 90 - 963 - 450 + 180 + 150 + 1845 + 750 lt=<<- - 1050.0;10.0;1050.0;460.0;10.0;460.0;10.0;480.0 + 1210.0;10.0;1210.0;460.0;10.0;460.0;10.0;480.0 UMLClass - 819 - 72 - 162 - 27 + 1605 + 120 + 270 + 45 /AnnounceableResource/ lt=.. @@ -84,10 +84,10 @@ lt=.. UMLClass - 405 - 252 - 36 - 27 + 765 + 420 + 60 + 45 AE @@ -95,10 +95,10 @@ lt=.. UMLClass - 153 - 522 - 54 - 27 + 255 + 870 + 90 + 45 AEAnnc @@ -106,32 +106,32 @@ lt=.. Relation - 171 - 90 - 900 - 450 + 285 + 150 + 1740 + 750 lt=<<- - 980.0;10.0;980.0;460.0;10.0;460.0;10.0;480.0 + 1140.0;10.0;1140.0;460.0;10.0;460.0;10.0;480.0 Relation - 414 - 90 - 513 - 180 + 780 + 150 + 1005 + 300 lt=<<- - 550.0;10.0;550.0;160.0;10.0;160.0;10.0;180.0 + 650.0;10.0;650.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 639 - 252 - 81 - 27 + 1230 + 420 + 135 + 45 /MgmtObj/ lt=.. @@ -140,10 +140,10 @@ lt=.. UMLClass - 297 - 432 - 45 - 27 + 735 + 720 + 75 + 45 ANDI @@ -151,32 +151,32 @@ lt=.. 
Relation - 315 - 270 - 378 - 180 + 765 + 450 + 555 + 300 lt=<<- - 400.0;10.0;400.0;160.0;10.0;160.0;10.0;180.0 + 350.0;10.0;350.0;160.0;10.0;160.0;10.0;180.0 Relation - 666 - 90 - 261 - 180 + 1275 + 150 + 510 + 300 lt=<<- - 270.0;10.0;270.0;160.0;10.0;160.0;10.0;180.0 + 320.0;10.0;320.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 81 - 612 - 72 - 27 + 375 + 1020 + 120 + 45 ANDIAnnc @@ -184,10 +184,10 @@ lt=.. UMLClass - 666 - 522 - 99 - 27 + 1230 + 870 + 165 + 45 /MgmtObjAnnc/ lt=.. @@ -196,32 +196,32 @@ lt=.. Relation - 108 - 540 - 621 - 90 + 420 + 900 + 915 + 150 lt=<<- - 670.0;10.0;670.0;60.0;10.0;60.0;10.0;80.0 + 590.0;10.0;590.0;60.0;10.0;60.0;10.0;80.0 Relation - 702 - 90 - 369 - 450 + 1290 + 150 + 735 + 750 lt=<<- - 390.0;10.0;390.0;460.0;10.0;460.0;10.0;480.0 + 470.0;10.0;470.0;460.0;10.0;460.0;10.0;480.0 UMLClass - 351 - 432 - 36 - 27 + 825 + 720 + 60 + 45 ANI @@ -229,21 +229,21 @@ lt=.. Relation - 360 - 270 - 333 - 180 + 840 + 450 + 480 + 300 lt=<<- - 350.0;10.0;350.0;160.0;10.0;160.0;10.0;180.0 + 300.0;10.0;300.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 162 - 612 - 63 - 27 + 510 + 1020 + 105 + 45 ANIAnnc @@ -251,21 +251,21 @@ lt=.. Relation - 189 - 540 - 540 - 90 + 555 + 900 + 780 + 150 lt=<<- - 580.0;10.0;580.0;60.0;10.0;60.0;10.0;80.0 + 500.0;10.0;500.0;60.0;10.0;60.0;10.0;80.0 Relation - 621 - 18 - 297 - 72 + 1275 + 30 + 495 + 120 lt=<<- 10.0;10.0;10.0;40.0;310.0;40.0;310.0;60.0 @@ -273,10 +273,10 @@ lt=.. Relation - 621 - 18 - 450 - 72 + 1275 + 30 + 750 + 120 lt=<<- 10.0;10.0;10.0;40.0;480.0;40.0;480.0;60.0 @@ -284,10 +284,10 @@ lt=.. UMLClass - 396 - 432 - 36 - 27 + 900 + 720 + 60 + 45 BAT @@ -295,21 +295,21 @@ lt=.. Relation - 405 - 270 - 288 - 180 + 915 + 450 + 405 + 300 lt=<<- - 300.0;10.0;300.0;160.0;10.0;160.0;10.0;180.0 + 250.0;10.0;250.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 234 - 612 - 63 - 27 + 630 + 1020 + 105 + 45 BATAnnc @@ -317,21 +317,21 @@ lt=.. Relation - 261 - 540 - 468 - 90 + 675 + 900 + 660 + 150 lt=<<- - 500.0;10.0;500.0;60.0;10.0;60.0;10.0;80.0 + 420.0;10.0;420.0;60.0;10.0;60.0;10.0;80.0 UMLClass - 450 - 252 - 36 - 27 + 840 + 420 + 60 + 45 CIN @@ -339,21 +339,21 @@ lt=.. Relation - 459 - 90 - 468 - 180 + 855 + 150 + 930 + 300 lt=<<- - 500.0;10.0;500.0;160.0;10.0;160.0;10.0;180.0 + 600.0;10.0;600.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 216 - 522 - 63 - 27 + 360 + 870 + 105 + 45 CINAnnc @@ -361,21 +361,21 @@ lt=.. Relation - 243 - 90 - 828 - 450 + 405 + 150 + 1620 + 750 lt=<<- - 900.0;10.0;900.0;460.0;10.0;460.0;10.0;480.0 + 1060.0;10.0;1060.0;460.0;10.0;460.0;10.0;480.0 UMLClass - 891 - 342 - 36 - 27 + 1725 + 570 + 60 + 45 CNT @@ -383,10 +383,10 @@ lt=.. Relation - 900 - 270 - 81 - 90 + 1740 + 450 + 135 + 150 lt=<<- 70.0;10.0;70.0;60.0;10.0;60.0;10.0;80.0 @@ -394,10 +394,10 @@ lt=.. UMLClass - 342 - 162 - 54 - 27 + 810 + 270 + 90 + 45 CNT_LA @@ -405,10 +405,10 @@ lt=.. UMLClass - 405 - 162 - 54 - 27 + 915 + 270 + 90 + 45 CNT_OL @@ -416,10 +416,10 @@ lt=.. Relation - 360 - 90 - 414 - 90 + 840 + 150 + 690 + 150 lt=<<- 440.0;10.0;440.0;60.0;10.0;60.0;10.0;80.0 @@ -427,10 +427,10 @@ lt=.. Relation - 423 - 90 - 351 - 90 + 945 + 150 + 585 + 150 lt=<<- 370.0;10.0;370.0;60.0;10.0;60.0;10.0;80.0 @@ -438,10 +438,10 @@ lt=.. UMLClass - 288 - 522 - 63 - 27 + 480 + 870 + 105 + 45 CNTAnnc @@ -449,21 +449,21 @@ lt=.. Relation - 315 - 90 - 756 - 450 + 525 + 150 + 1500 + 750 lt=<<- - 820.0;10.0;820.0;460.0;10.0;460.0;10.0;480.0 + 980.0;10.0;980.0;460.0;10.0;460.0;10.0;480.0 UMLClass - 450 - 72 - 54 - 27 + 990 + 120 + 90 + 45 CSEBase @@ -471,10 +471,10 @@ lt=.. 
Relation - 468 - 18 - 180 - 72 + 1020 + 30 + 300 + 120 lt=<<- 180.0;10.0;180.0;40.0;10.0;40.0;10.0;60.0 @@ -482,10 +482,10 @@ lt=.. UMLClass - 495 - 252 - 36 - 27 + 915 + 420 + 60 + 45 CSR @@ -493,10 +493,10 @@ lt=.. UMLClass - 360 - 522 - 63 - 27 + 600 + 870 + 105 + 45 CSRAnnc @@ -504,21 +504,21 @@ lt=.. Relation - 387 - 90 - 684 - 450 + 645 + 150 + 1380 + 750 lt=<<- - 740.0;10.0;740.0;460.0;10.0;460.0;10.0;480.0 + 900.0;10.0;900.0;460.0;10.0;460.0;10.0;480.0 UMLClass - 495 - 432 - 36 - 27 + 1065 + 720 + 60 + 45 DVC @@ -526,21 +526,21 @@ lt=.. Relation - 504 - 270 - 189 - 180 + 1080 + 450 + 240 + 300 lt=<<- - 190.0;10.0;190.0;160.0;10.0;160.0;10.0;180.0 + 140.0;10.0;140.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 387 - 612 - 63 - 27 + 885 + 1020 + 105 + 45 DVCAnnc @@ -548,32 +548,32 @@ lt=.. Relation - 414 - 540 - 315 - 90 + 930 + 900 + 405 + 150 lt=<<- - 330.0;10.0;330.0;60.0;10.0;60.0;10.0;80.0 + 250.0;10.0;250.0;60.0;10.0;60.0;10.0;80.0 Relation - 549 - 270 - 144 - 180 + 1155 + 450 + 165 + 300 lt=<<- - 140.0;10.0;140.0;160.0;10.0;160.0;10.0;180.0 + 90.0;10.0;90.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 540 - 432 - 36 - 27 + 1140 + 720 + 60 + 45 DVI @@ -581,10 +581,10 @@ lt=.. UMLClass - 459 - 612 - 63 - 27 + 1005 + 1020 + 105 + 45 DVIAnnc @@ -592,21 +592,21 @@ lt=.. Relation - 477 - 540 - 252 - 90 + 1035 + 900 + 300 + 150 lt=<<- - 260.0;10.0;260.0;60.0;10.0;60.0;10.0;80.0 + 180.0;10.0;180.0;60.0;10.0;60.0;10.0;80.0 UMLClass - 585 - 432 - 36 - 27 + 1215 + 720 + 60 + 45 EVL @@ -614,21 +614,21 @@ lt=.. Relation - 594 - 270 - 99 - 180 + 1230 + 450 + 90 + 300 lt=<<- - 90.0;10.0;90.0;160.0;10.0;160.0;10.0;180.0 + 40.0;10.0;40.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 531 - 612 - 63 - 27 + 1125 + 1020 + 105 + 45 EVLAnnc @@ -636,21 +636,21 @@ lt=.. Relation - 558 - 540 - 171 - 90 + 1170 + 900 + 165 + 150 lt=<<- - 170.0;10.0;170.0;60.0;10.0;60.0;10.0;80.0 + 90.0;10.0;90.0;60.0;10.0;60.0;10.0;80.0 UMLClass - 513 - 72 - 36 - 27 + 1095 + 120 + 60 + 45 FCI @@ -658,10 +658,10 @@ lt=.. Relation - 522 - 18 - 126 - 72 + 1110 + 30 + 210 + 120 lt=<<- 120.0;10.0;120.0;40.0;10.0;40.0;10.0;60.0 @@ -669,10 +669,10 @@ lt=.. Relation - 954 - 270 - 27 - 90 + 1830 + 450 + 45 + 150 lt=<<- 10.0;10.0;10.0;80.0 @@ -680,10 +680,10 @@ lt=.. UMLClass - 936 - 342 - 45 - 27 + 1800 + 570 + 75 + 45 FCNT @@ -691,10 +691,10 @@ lt=.. UMLClass - 468 - 162 - 63 - 27 + 1020 + 270 + 105 + 45 FCNT_LA @@ -702,10 +702,10 @@ lt=.. UMLClass - 540 - 162 - 63 - 27 + 1140 + 270 + 105 + 45 FCNT_OL @@ -713,10 +713,10 @@ lt=.. Relation - 639 - 90 - 135 - 90 + 1305 + 150 + 225 + 150 lt=<<- 130.0;10.0;130.0;60.0;10.0;60.0;10.0;80.0 @@ -724,10 +724,10 @@ lt=.. Relation - 720 - 90 - 54 - 90 + 1440 + 150 + 90 + 150 lt=<<- 40.0;10.0;40.0;60.0;10.0;60.0;10.0;80.0 @@ -735,10 +735,10 @@ lt=.. UMLClass - 513 - 522 - 72 - 27 + 855 + 870 + 120 + 45 FCNTAnnc @@ -746,21 +746,21 @@ lt=.. Relation - 540 - 90 - 531 - 450 + 900 + 150 + 1125 + 750 lt=<<- - 570.0;10.0;570.0;460.0;10.0;460.0;10.0;480.0 + 730.0;10.0;730.0;460.0;10.0;460.0;10.0;480.0 UMLClass - 630 - 432 - 36 - 27 + 1290 + 720 + 60 + 45 FWR @@ -768,10 +768,10 @@ lt=.. UMLClass - 675 - 432 - 36 - 27 + 1365 + 720 + 60 + 45 MEM @@ -779,21 +779,21 @@ lt=.. Relation - 639 - 270 - 54 - 180 + 1275 + 450 + 75 + 300 lt=<<- - 40.0;10.0;40.0;160.0;10.0;160.0;10.0;180.0 + 10.0;10.0;10.0;160.0;30.0;160.0;30.0;180.0 UMLClass - 603 - 612 - 63 - 27 + 1245 + 1020 + 105 + 45 FWRAnnc @@ -801,10 +801,10 @@ lt=.. UMLClass - 594 - 252 - 36 - 27 + 1080 + 420 + 60 + 45 GRP @@ -812,21 +812,21 @@ lt=.. 
Relation - 603 - 90 - 324 - 180 + 1095 + 150 + 690 + 300 lt=<<- - 340.0;10.0;340.0;160.0;10.0;160.0;10.0;180.0 + 440.0;10.0;440.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 612 - 162 - 72 - 27 + 1260 + 270 + 120 + 45 GRP_FOPT @@ -834,10 +834,10 @@ lt=.. UMLClass - 594 - 522 - 63 - 27 + 990 + 870 + 105 + 45 GRPAnnc @@ -845,32 +845,32 @@ lt=.. Relation - 666 - 270 - 45 - 180 + 1275 + 450 + 150 + 300 lt=<<- - 10.0;10.0;10.0;160.0;30.0;160.0;30.0;180.0 + 10.0;10.0;10.0;160.0;80.0;160.0;80.0;180.0 Relation - 630 - 540 - 99 - 90 + 1290 + 900 + 45 + 150 lt=<<- - 90.0;10.0;90.0;60.0;10.0;60.0;10.0;80.0 + 10.0;10.0;10.0;80.0 UMLClass - 675 - 612 - 72 - 27 + 1365 + 1020 + 120 + 45 MEMAnnc @@ -878,21 +878,21 @@ lt=.. Relation - 702 - 540 - 27 - 90 + 1290 + 900 + 165 + 150 lt=<<- - 10.0;10.0;10.0;80.0 + 10.0;10.0;10.0;60.0;90.0;60.0;90.0;80.0 Relation - 747 - 90 - 63 - 90 + 1485 + 150 + 105 + 150 lt=<<- 10.0;10.0;10.0;60.0;50.0;60.0;50.0;80.0 @@ -900,10 +900,10 @@ lt=.. UMLClass - 729 - 252 - 36 - 27 + 1380 + 420 + 60 + 45 NOD @@ -911,21 +911,21 @@ lt=.. Relation - 738 - 90 - 189 - 180 + 1395 + 150 + 390 + 300 lt=<<- - 190.0;10.0;190.0;160.0;10.0;160.0;10.0;180.0 + 240.0;10.0;240.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 774 - 522 - 63 - 27 + 1410 + 870 + 105 + 45 NODAnnc @@ -933,32 +933,32 @@ lt=.. Relation - 621 - 90 - 450 - 450 + 1035 + 150 + 990 + 750 lt=<<- - 480.0;10.0;480.0;460.0;10.0;460.0;10.0;480.0 + 640.0;10.0;640.0;460.0;10.0;460.0;10.0;480.0 Relation - 801 - 90 - 270 - 450 + 1455 + 150 + 570 + 750 lt=<<- - 280.0;10.0;280.0;460.0;10.0;460.0;10.0;480.0 + 360.0;10.0;360.0;460.0;10.0;460.0;10.0;480.0 UMLClass - 720 - 432 - 45 - 27 + 1440 + 720 + 75 + 45 NYCFC @@ -966,10 +966,10 @@ lt=.. UMLClass - 756 - 612 - 81 - 27 + 1500 + 1020 + 135 + 45 NYCFCAnnc @@ -977,21 +977,21 @@ lt=.. Relation - 702 - 540 - 108 - 90 + 1290 + 900 + 300 + 150 lt=<<- - 10.0;10.0;10.0;60.0;100.0;60.0;100.0;80.0 + 10.0;10.0;10.0;60.0;180.0;60.0;180.0;80.0 UMLClass - 774 - 432 - 36 - 27 + 1530 + 720 + 60 + 45 RBO @@ -999,32 +999,32 @@ lt=.. Relation - 666 - 270 - 99 - 180 + 1275 + 450 + 240 + 300 lt=<<- - 10.0;10.0;10.0;160.0;90.0;160.0;90.0;180.0 + 10.0;10.0;10.0;160.0;140.0;160.0;140.0;180.0 Relation - 666 - 270 - 144 - 180 + 1275 + 450 + 315 + 300 lt=<<- - 10.0;10.0;10.0;160.0;140.0;160.0;140.0;180.0 + 10.0;10.0;10.0;160.0;190.0;160.0;190.0;180.0 UMLClass - 846 - 612 - 63 - 27 + 1650 + 1020 + 105 + 45 RBOAnnc @@ -1032,21 +1032,21 @@ lt=.. Relation - 702 - 540 - 198 - 90 + 1290 + 900 + 450 + 150 lt=<<- - 10.0;10.0;10.0;60.0;200.0;60.0;200.0;80.0 + 10.0;10.0;10.0;60.0;280.0;60.0;280.0;80.0 UMLClass - 648 - 72 - 36 - 27 + 1320 + 120 + 60 + 45 SUB @@ -1054,10 +1054,10 @@ lt=.. Relation - 612 - 18 - 36 - 72 + 1260 + 30 + 60 + 120 lt=<<- 20.0;10.0;20.0;40.0;10.0;40.0;10.0;60.0 @@ -1065,10 +1065,10 @@ lt=.. UMLClass - 819 - 432 - 36 - 27 + 1605 + 720 + 60 + 45 SWR @@ -1076,21 +1076,21 @@ lt=.. Relation - 666 - 270 - 189 - 180 + 1275 + 450 + 390 + 300 lt=<<- - 10.0;10.0;10.0;160.0;190.0;160.0;190.0;180.0 + 10.0;10.0;10.0;160.0;240.0;160.0;240.0;180.0 UMLClass - 918 - 612 - 63 - 27 + 1770 + 1020 + 105 + 45 SWRAnnc @@ -1098,21 +1098,21 @@ lt=.. Relation - 702 - 540 - 270 - 90 + 1290 + 900 + 570 + 150 lt=<<- - 10.0;10.0;10.0;60.0;280.0;60.0;280.0;80.0 + 10.0;10.0;10.0;60.0;360.0;60.0;360.0;80.0 Relation - 621 - 18 - 63 - 72 + 1275 + 30 + 105 + 120 lt=<<- 10.0;10.0;10.0;40.0;50.0;40.0;50.0;60.0 @@ -1120,10 +1120,10 @@ lt=.. UMLClass - 603 - 72 - 36 - 27 + 1245 + 120 + 60 + 45 REQ @@ -1131,10 +1131,10 @@ lt=.. 
UMLClass - 990 - 342 - 36 - 27 + 1890 + 570 + 60 + 45 TS @@ -1142,10 +1142,10 @@ lt=.. UMLClass - 864 - 252 - 36 - 27 + 1680 + 420 + 60 + 45 TSI @@ -1153,10 +1153,10 @@ lt=.. Relation - 954 - 270 - 72 - 90 + 1830 + 450 + 120 + 150 lt=<<- 10.0;10.0;10.0;60.0;60.0;60.0;60.0;80.0 @@ -1164,10 +1164,10 @@ lt=.. Relation - 873 - 90 - 54 - 180 + 1695 + 150 + 90 + 300 lt=<<- 40.0;10.0;40.0;160.0;10.0;160.0;10.0;180.0 @@ -1175,10 +1175,10 @@ lt=.. UMLClass - 918 - 522 - 63 - 27 + 1770 + 870 + 105 + 45 TSAnnc @@ -1186,10 +1186,10 @@ lt=.. UMLClass - 1062 - 522 - 63 - 27 + 2010 + 870 + 105 + 45 TSIAnnc @@ -1197,10 +1197,10 @@ lt=.. Relation - 1044 - 90 - 72 - 450 + 1980 + 150 + 120 + 750 lt=<<- 10.0;10.0;10.0;460.0;60.0;460.0;60.0;480.0 @@ -1208,10 +1208,10 @@ lt=.. Relation - 945 - 90 - 126 - 450 + 1815 + 150 + 210 + 750 lt=<<- 120.0;10.0;120.0;460.0;10.0;460.0;10.0;480.0 @@ -1219,10 +1219,10 @@ lt=.. Relation - 567 - 18 - 81 - 72 + 1185 + 30 + 135 + 120 lt=<<- 70.0;10.0;70.0;40.0;10.0;40.0;10.0;60.0 @@ -1230,10 +1230,10 @@ lt=.. UMLClass - 558 - 72 - 36 - 27 + 1170 + 120 + 60 + 45 PCH @@ -1241,10 +1241,10 @@ lt=.. UMLClass - 819 - 252 - 36 - 27 + 1605 + 420 + 60 + 45 TSB @@ -1252,10 +1252,10 @@ lt=.. UMLClass - 990 - 522 - 63 - 27 + 1890 + 870 + 105 + 45 TSBAnnc @@ -1263,10 +1263,10 @@ lt=.. UMLClass - 693 - 162 - 63 - 27 + 1395 + 270 + 105 + 45 PCH_PCU @@ -1274,10 +1274,10 @@ lt=.. Relation - 747 - 90 - 126 - 90 + 1485 + 150 + 210 + 150 lt=<<- 10.0;10.0;10.0;60.0;120.0;60.0;120.0;80.0 @@ -1285,10 +1285,10 @@ lt=.. UMLClass - 405 - 72 - 36 - 27 + 915 + 120 + 60 + 45 CRS @@ -1296,10 +1296,10 @@ lt=.. Relation - 414 - 18 - 234 - 72 + 930 + 30 + 390 + 120 lt=<<- 240.0;10.0;240.0;40.0;10.0;40.0;10.0;60.0 @@ -1307,10 +1307,10 @@ lt=.. UMLClass - 774 - 252 - 36 - 27 + 1530 + 420 + 60 + 45 SMD @@ -1318,10 +1318,10 @@ lt=.. Relation - 783 - 90 - 144 - 180 + 1545 + 150 + 240 + 300 lt=<<- 140.0;10.0;140.0;160.0;10.0;160.0;10.0;180.0 @@ -1329,10 +1329,10 @@ lt=.. UMLClass - 846 - 522 - 63 - 27 + 1650 + 870 + 105 + 45 SMDAnnc @@ -1340,10 +1340,10 @@ lt=.. Relation - 873 - 90 - 198 - 450 + 1695 + 150 + 330 + 750 lt=<<- 200.0;10.0;200.0;460.0;10.0;460.0;10.0;480.0 @@ -1351,10 +1351,10 @@ lt=.. UMLClass - 864 - 432 - 45 - 27 + 1680 + 720 + 75 + 45 WIFIC @@ -1362,21 +1362,21 @@ lt=.. Relation - 666 - 270 - 243 - 180 + 1275 + 450 + 480 + 300 lt=<<- - 10.0;10.0;10.0;160.0;250.0;160.0;250.0;180.0 + 10.0;10.0;10.0;160.0;300.0;160.0;300.0;180.0 UMLClass - 990 - 612 - 72 - 27 + 1890 + 1020 + 120 + 45 WIFICAnnc @@ -1384,21 +1384,21 @@ lt=.. Relation - 702 - 540 - 342 - 90 + 1290 + 900 + 690 + 150 lt=<<- - 10.0;10.0;10.0;60.0;360.0;60.0;360.0;80.0 + 10.0;10.0;10.0;60.0;440.0;60.0;440.0;80.0 UMLClass - 441 - 432 - 45 - 27 + 975 + 720 + 75 + 45 DATC @@ -1406,21 +1406,21 @@ lt=.. Relation - 459 - 270 - 234 - 180 + 1005 + 450 + 315 + 300 lt=<<- - 240.0;10.0;240.0;160.0;10.0;160.0;10.0;180.0 + 190.0;10.0;190.0;160.0;10.0;160.0;10.0;180.0 UMLClass - 306 - 612 - 72 - 27 + 750 + 1020 + 120 + 45 DATCAnnc @@ -1428,21 +1428,21 @@ lt=.. Relation - 333 - 540 - 396 - 90 + 795 + 900 + 540 + 150 lt=<<- - 420.0;10.0;420.0;60.0;10.0;60.0;10.0;80.0 + 340.0;10.0;340.0;60.0;10.0;60.0;10.0;80.0 UMLClass - 693 - 72 - 117 - 27 + 1395 + 120 + 195 + 45 /VirtualResource/ lt=.. @@ -1451,10 +1451,10 @@ lt=.. UMLClass - 828 - 162 - 54 - 27 + 1620 + 270 + 90 + 45 TS_OL @@ -1462,10 +1462,10 @@ lt=.. UMLClass - 765 - 162 - 54 - 27 + 1515 + 270 + 90 + 45 TS_LA @@ -1473,10 +1473,10 @@ lt=.. 
Relation - 567 - 90 - 207 - 90 + 1185 + 150 + 345 + 150 lt=<<- 210.0;10.0;210.0;60.0;10.0;60.0;10.0;80.0 @@ -1484,10 +1484,10 @@ lt=.. Relation - 621 - 18 - 153 - 72 + 1275 + 30 + 255 + 120 lt=<<- 10.0;10.0;10.0;40.0;150.0;40.0;150.0;60.0 @@ -1495,10 +1495,10 @@ lt=.. UMLClass - 351 - 252 - 45 - 27 + 675 + 420 + 75 + 45 ACTR @@ -1506,21 +1506,21 @@ lt=.. Relation - 369 - 90 - 558 - 180 + 705 + 150 + 1080 + 300 lt=<<- - 600.0;10.0;600.0;160.0;10.0;160.0;10.0;180.0 + 700.0;10.0;700.0;160.0;10.0;160.0;10.0;180.0 Relation - 495 - 90 - 279 - 90 + 1065 + 150 + 465 + 150 lt=<<- 290.0;10.0;290.0;60.0;10.0;60.0;10.0;80.0 @@ -1528,10 +1528,10 @@ lt=.. UMLClass - 540 - 252 - 45 - 27 + 990 + 420 + 75 + 45 DEPR @@ -1539,21 +1539,21 @@ lt=.. Relation - 558 - 90 - 369 - 180 + 1020 + 150 + 765 + 300 lt=<<- - 390.0;10.0;390.0;160.0;10.0;160.0;10.0;180.0 + 490.0;10.0;490.0;160.0;10.0;160.0;10.0;180.0 UMLClass 0 - 522 - 72 - 27 + 870 + 120 + 45 ACTRAnnc @@ -1561,21 +1561,21 @@ lt=.. Relation - 27 - 90 - 1044 - 450 + 45 + 150 + 1980 + 750 lt=<<- - 1140.0;10.0;1140.0;460.0;10.0;460.0;10.0;480.0 + 1300.0;10.0;1300.0;460.0;10.0;460.0;10.0;480.0 UMLClass - 432 - 522 - 72 - 27 + 720 + 870 + 120 + 45 DEPRAnnc @@ -1583,10 +1583,10 @@ lt=.. Relation - 1017 - 90 - 54 - 450 + 1935 + 150 + 90 + 750 lt=<<- 40.0;10.0;40.0;460.0;10.0;460.0;10.0;480.0 @@ -1594,21 +1594,21 @@ lt=.. Relation - 459 - 90 - 612 - 450 + 765 + 150 + 1260 + 750 lt=<<- - 660.0;10.0;660.0;460.0;10.0;460.0;10.0;480.0 + 820.0;10.0;820.0;460.0;10.0;460.0;10.0;480.0 UMLClass - 180 - 369 - 117 - 36 + 420 + 615 + 195 + 60 /FlexContainer/ /Specializations/ @@ -1619,32 +1619,32 @@ valign=center Relation - 288 - 360 - 684 - 45 + 600 + 600 + 1260 + 75 lt=<<.. - 740.0;10.0;740.0;30.0;10.0;30.0 + 820.0;10.0;820.0;30.0;10.0;30.0 Relation - 288 - 90 - 261 - 315 + 600 + 150 + 555 + 525 lt=<<.. - 270.0;10.0;270.0;40.0;20.0;40.0;20.0;330.0;10.0;330.0 + 350.0;10.0;350.0;40.0;30.0;40.0;30.0;330.0;10.0;330.0 UMLClass - 909 - 252 - 126 - 27 + 1755 + 420 + 210 + 45 /ContainerResource/ lt=.. @@ -1653,21 +1653,21 @@ lt=.. Relation - 504 - 90 - 423 - 180 + 930 + 150 + 855 + 300 lt=<<- - 450.0;10.0;450.0;160.0;10.0;160.0;10.0;180.0 + 550.0;10.0;550.0;160.0;10.0;160.0;10.0;180.0 Relation - 900 - 90 - 81 - 180 + 1740 + 150 + 135 + 300 lt=<<- 10.0;10.0;10.0;160.0;70.0;160.0;70.0;180.0 @@ -1675,12 +1675,100 @@ lt=.. 
Relation - 828 - 90 - 99 - 180 + 1620 + 150 + 165 + 300 lt=<<- 90.0;10.0;90.0;160.0;10.0;160.0;10.0;180.0 + + UMLClass + + 1155 + 420 + 60 + 45 + + LCP + + + + UMLClass + + 1455 + 420 + 60 + 45 + + SCH + + + + Relation + + 1170 + 150 + 615 + 300 + + lt=<<- + 390.0;10.0;390.0;160.0;10.0;160.0;10.0;180.0 + + + Relation + + 1470 + 150 + 315 + 300 + + lt=<<- + 190.0;10.0;190.0;160.0;10.0;160.0;10.0;180.0 + + + UMLClass + + 1530 + 870 + 105 + 45 + + SCHAnnc + + + + Relation + + 1575 + 150 + 450 + 750 + + lt=<<- + 280.0;10.0;280.0;460.0;10.0;460.0;10.0;480.0 + + + UMLClass + + 1110 + 870 + 105 + 45 + + LCPAnnc + + + + Relation + + 1155 + 150 + 870 + 750 + + lt=<<- + 560.0;10.0;560.0;460.0;10.0;460.0;10.0;480.0 + diff --git a/init/attributePolicies.ap b/init/attributePolicies.ap index db430998..5bdcc28c 100644 --- a/init/attributePolicies.ap +++ b/init/attributePolicies.ap @@ -21,7 +21,7 @@ "rtypes": [ "ALL" ], "lname": "resourceID", "ns": "m2m", - "type": "string", + "type": "ID", "car": "1", "oc": "NP", "ou": "NP", @@ -61,7 +61,7 @@ "rtypes": [ "ALL" ], "lname": "parentID", "ns": "m2m", - "type": "string", + "type": "ID", "car": "1", "oc": "NP", "ou": "NP", @@ -75,7 +75,7 @@ "lname": "accessControlPolicyIDs", "ns": "m2m", "type": "list", - "ltype": "string", + "ltype": "ID", "car": "01L", "oc": "O", "ou": "O", @@ -182,7 +182,7 @@ "lname": "announcedAttribute", "ns": "m2m", "type": "list", - "ltype": "string", + "ltype": "ncname", "car": "01L", "oc": "NP", "ou": "NP", @@ -194,7 +194,7 @@ "lname": "announcedAttribute", "ns": "m2m", "type": "list", - "ltype": "string", + "ltype": "ncname", "car": "01L", "oc": "O", "ou": "O", @@ -244,11 +244,11 @@ ], "loc": [ { - "rtypes": [ "ALL" ], + "rtypes": [ "AE", "AEAnnc", "CSEBase", "CSEBaseAnnc", "CSR", "CSRAnnc", "CIN", "CINAnnc", + "CNT", "CNTAnnc", "FCI", "FCNT", "FCNTAnnc", "TS", "TSAnnc", "TSI", "TSIAnnc", "REQRESP" ], "lname": "location", "ns": "m2m", - "type": "list", - "ltype": "m2m:geoCoordinates", + "type": "m2m:geoCoordinates", "car": "01L", "oc": "O", "ou": "O", @@ -256,7 +256,7 @@ "annc": "OA" }, { - "rtypes": [ "DVI" ], + "rtypes": [ "DVI", "DVIAnnc" ], "lname": "location", "ns": "m2m", "type": "string", @@ -272,7 +272,7 @@ "rtypes": [ "ALL" ], "lname": "custodian", "ns": "m2m", - "type": "string", + "type": "ID", "car": "01", "oc": "O", "ou": "O", @@ -315,7 +315,7 @@ "act": [ { "rtypes": [ "ALL" ], - "lname": "accessControWindow", + "lname": "activate", "ns": "m2m", "type": "boolean", "car": "01", @@ -357,7 +357,7 @@ "rtypes": [ "ALL" ], "lname": "AE-ID", "ns": "m2m", - "type": "string", + "type": "ID", "car": "1", "oc": "NP", "ou": "NP", @@ -365,6 +365,19 @@ "annc": "OA" } ], + "aid": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "authID", + "ns": "m2m", + "type": "string", + "car": "01", + "oc": "O", + "ou": "NP", + "od": "NP", + "annc": "OA" + } + ], "air": [ { "rtypes": [ "ACTR", "ACTRAnnc", "REQRESP" ], @@ -556,7 +569,7 @@ "rtypes": [ "TSB", "TSBAnnc", "REQRESP" ], "lname": "beaconRequester", "ns": "m2m", - "type": "string", + "type": "ID", "car": "01", "oc": "O", "ou": "NP", @@ -845,7 +858,7 @@ "rtypes": [ "ALL" ], "lname": "creator", "ns": "m2m", - "type": "string", + "type": "ID", "car": "01", "oc": "O", "ou": "NP", @@ -893,7 +906,7 @@ "rtypes": [ "CSEBase", "CSRAnnc" ], "lname": "CSE-ID", "ns": "m2m", - "type": "string", + "type": "ID", "car": "1", "oc": "M", "ou": "NP", @@ -904,7 +917,7 @@ "rtypes": [ "CSR" ], "lname": "CSE-ID", "ns": "m2m", - "type": "string", + "type": "ID", "car": "1", "oc": "O", "ou": "NP", @@ -1360,6 
+1373,39 @@ "annc": "OA" } ], + "gec": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "geofenceEventCriteria", + "ns": "m2m", + "type": "enum", + "etype": "m2m:geofenceEventCriteria", + "car": "01", + "oc": "O", + "ou": "O", + "od": "O", + "annc": "OA" + } + ], + "geom": [ + { + "rtypes": [ "REQRESP" ], + "lname": "geometry", + "ns": "m2m", + "type": "geoJsonCoordinate", + "car": "1" + } + ], + "gmty": [ + { + "rtypes": [ "REQRESP" ], + "lname": "geometryType", + "ns": "m2m", + "type": "enum", + "etype": "m2m:geometryType", + "car": "1" + } + ], "gn": [ { "rtypes": [ "ALL" ], @@ -1386,6 +1432,29 @@ "annc": "NA" } ], + "gsf": [ + { + "rtypes": [ "REQRESP" ], + "lname": "geoSpatialFunction", + "ns": "m2m", + "type": "enum", + "etype": "m2m:geoSpatialFunctionType", + "car": "1" + } + ], + "gta": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "geographicalTargetArea", + "ns": "m2m", + "type": "any", + "car": "01", + "oc": "O", + "ou": "O", + "od": "O", + "annc": "OA" + } + ], "hael": [ { "rtypes": [ "ALL" ], @@ -1405,7 +1474,7 @@ "rtypes": [ "ALL" ], "lname": "hostedCSELink", "ns": "m2m", - "type": "string", + "type": "ID", "car": "01", "oc": "O", "ou": "O", @@ -1575,6 +1644,20 @@ "annc": "OA" } ], + "lit": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "locationInformationType", + "ns": "m2m", + "type": "enum", + "etype": "m2m:locationInformationType", + "car": "1", + "oc": "O", + "ou": "O", + "od": "O", + "annc": "OA" + } + ], "ln": [ { "rtypes": [ "ALL" ], @@ -1606,8 +1689,8 @@ { "rtypes": [ "ACPAnnc", "ACTRAnnc", "AEAnnc", "ANDIAnnc", "ANIAnnc", "BATAnnc", "CINAnnc", "CNTAnnc", "CSEBaseAnnc", "CSRAnnc", "DATCAnnc", "DEPRAnnc", "DVCAnnc", "DVIAnnc", "EVLAnnc", - "FCNTAnnc", "FWRAnnc", "GRPAnnc", "MEMAnnc", "NODAnnc", "NYCFCAnnc", "RBOAnnc", - "SMDAnnc", "SWRAnnc", "TSAnnc", "TSBAnnc", "TSIAnnc", "WIFIC", "WIFICAnnc", + "FCNTAnnc", "FWRAnnc", "GRPAnnc", "LCPAnnc", "MEMAnnc", "NODAnnc", "NYCFCAnnc", "RBOAnnc", + "SCHAnnc", "SMDAnnc", "SWRAnnc", "TSAnnc", "TSBAnnc", "TSIAnnc", "WIFIC", "WIFICAnnc", "REQRESP" ], "lname": "link", "ns": "m2m", @@ -1619,6 +1702,114 @@ "annc": "MA" } ], + "loi": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "locationContainerID", + "ns": "m2m", + "type": "anyURI", + "car": "1", + "oc": "NP", + "ou": "NP", + "od": "NP", + "annc": "OA" + } + ], + "lon": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "locationContainerName", + "ns": "m2m", + "type": "string", + "car": "01", + "oc": "O", + "ou": "NP", + "od": "NP", + "annc": "OA" + } + ], + "los": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "locationSource", + "ns": "m2m", + "type": "enum", + "etype": "m2m:locationSource", + "car": "01", + "oc": "M", + "ou": "NP", + "od": "NP", + "annc": "OA" + } + ], + "lost": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "locationStatus", + "ns": "m2m", + "type": "string", + "car": "1", + "oc": "NP", + "ou": "NP", + "od": "NP", + "annc": "OA" + } + ], + "lor": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "locationServer", + "ns": "m2m", + "type": "anyURI", + "car": "01", + "oc": "O", + "ou": "NP", + "od": "NP", + "annc": "OA" + } + ], + "lot": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "locationTargetID", + "ns": "m2m", + "type": "string", + "car": "01L", + "oc": "O", + "ou": "NP", + "od": "NP", + "annc": "OA" + } + ], + "lou": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "locationUpdatePeriod", + "ns": "m2m", + "type": "list", + "ltype": "duration", + "car": "01L", + "oc": "O", + "ou": "O", + "od": "O", + "annc": "OA" + } + ], + 
"luec": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "locationUpdateEventCriteria", + "ns": "m2m", + "type": "enum", + "etype": "m2m:locationUpdateEventCriteria", + "car": "01", + "oc": "O", + "ou": "O", + "od": "O", + "annc": "OA" + } + ], + // TODO Align later // EXPERIMENTAL "ma": [ @@ -1719,7 +1910,7 @@ "lname": "missingDataCurrentNr", "ns": "m2m", "type": "nonNegInteger", - "car": "01", + "car": "1", "oc": "NP", "ou": "NP", "od": "O", @@ -2044,6 +2235,33 @@ "annc": "OA" } ], + "nco": [ + { + "rtypes": [ "SCH", "SCHAnnc", "REQRESP" ], + "lname": "networkCoordinated", + "ns": "m2m", + "type": "boolean", + "car": "01", + "oc": "O", + "ou": "O", + "od": "O", + "annc": "OA" + } + ], + "nct": [ + { + "rtypes": [ "ALL" ], + "lname": "notificationContentType", + "ns": "m2m", + "type": "enum", + "etype": "m2m:notificationContentType", + "car": "1", + "oc": "O", + "ou": "O", + "od": "O", + "annc": "NA" + } + ], "nec": [ { "rtypes": [ "CRS" ], @@ -2068,17 +2286,12 @@ "annc": "NA" } ], - "nct": [ + "nev": [ { - "rtypes": [ "ALL" ], - "lname": "notificationContentType", + "rtypes": [ "UNKNOWN" ], + "lname": "notificationEvent", "ns": "m2m", - "type": "nonNegInteger", - "car": "1", - "oc": "O", - "ou": "O", - "od": "O", - "annc": "NA" + "type": "any" } ], "nfu": [ @@ -2273,7 +2486,7 @@ "rtypes": [ "ALL", "REQRESP" ], "lname": "originator", "ns": "m2m", - "type": "string", + "type": "ID", "car": "1", "oc": "NP", "ou": "NP", @@ -2479,6 +2692,14 @@ "annc": "OA" } ], + "rep": [ + { + "rtypes": [ "UNKNOWN" ], + "lname": "representation", + "ns": "m2m", + "type": "any" + } + ], "rid": [ { "rtypes": [ "ALL", "REQRESP" ], @@ -2505,6 +2726,19 @@ "annc": "NA" } ], + "rlkl": [ + { + "rtypes": [ "LCP", "LCPAnnc" ], + "lname": "retrieveLastKnownLocation", + "ns": "m2m", + "type": "boolean", + "car": "01", + "oc": "O", + "ou": "O", + "od": "O", + "annc": "OA" + } + ], "rms": [ { "rtypes": [ "ALL" ], @@ -2671,7 +2905,7 @@ "lname": "sessionCapabilities", "ns": "m2m", "type": "list", - "ltype": "string", // m2m:sessionCapabilities + "ltype": "string", "car": "01", "oc": "O", "ou": "O", @@ -2679,6 +2913,19 @@ "annc": "OA" } ], + "se": [ + { + "rtypes": [ "SCH", "SCHAnnc", "REQRESP" ], + "lname": "scheduleElement", + "ns": "m2m", + "type": "m2m:scheduleEntries", + "car": "1L", + "oc": "M", + "ou": "O", + "od": "O", + "annc": "OA" + } + ], "sfc": [ { "rtypes": [ "DEPR", "DEPRAnnc", "REQRESP" ], @@ -2692,6 +2939,14 @@ "annc": "OA" } ], + "m2m:sgn": [ + { + "rtypes": [ "UNKNOWN" ], + "lname": "notification", + "ns": "m2m", + "type": "any" + } + ], "sld": [ { "rtypes": [ "ALL" ], @@ -2917,6 +3172,14 @@ "annc": "NA" } ], + "sur": [ + { + "rtypes": [ "UNKNOWN" ], + "lname": "subscriptionReference", + "ns": "m2m", + "type": "any" + } + ], // EXPERIMENTAL @@ -3157,6 +3420,14 @@ "annc": "OA" } ], + "m2m:uri": [ + { + "rtypes": [ "UNKNOWN" ], + "lname": "URI", + "ns": "m2m", + "type": "any" + } + ], "url": [ { "rtypes": [ "ALL" ], @@ -3196,6 +3467,15 @@ "annc": "OA" } ], + "vrq": [ + { + "rtypes": [ "UNKNOWN" ], + "lname": "verificationRequest", + "ns": "m2m", + "type": "boolean" + } + ], + "wcrds": [ { "rtypes": [ "WIFIC", "WIFICAnnc", "REQRESP" ], diff --git a/init/complexTypePolicies.ap b/init/complexTypePolicies.ap index 2d7d6099..78e8ae3d 100644 --- a/init/complexTypePolicies.ap +++ b/init/complexTypePolicies.ap @@ -18,7 +18,7 @@ "lname": "attribute", "ns": "m2m", "type": "list", - "ltype": "string", //m2m:attributeList + "ltype": "m2m:attribute", "car": "01L" }, { @@ -26,8 +26,8 @@ "ctype": 
"m2m:eventNotificationCriteria", "lname": "attribute", "ns": "m2m", - "type": "list", - "ltype": "string", //m2m:attributeList + "type": "list", //m2m:attributeList + "ltype": "ncname", "car": "01L" } ], @@ -112,10 +112,10 @@ { "rtypes": [ "COMPLEX" ], "ctype": "m2m:requestPrimitive", - "lname": "resourceType", + "lname": "desiredIdentifierResultType", "ns": "m2m", "type": "enum", - "etype": "m2m:resourceType", + "etype": "m2m:desIdResType", "car": "01" } ], @@ -219,7 +219,7 @@ "ctype": "m2m:operationResult", "lname": "from", "ns": "m2m", - "type": "string", + "type": "ID", "car": "01" }, { @@ -227,14 +227,14 @@ "ctype": "m2m:requestPrimitive", "lname": "from", "ns": "m2m", - "type": "anyURI", + "type": "ID", "car": "01" }, { "rtypes": [ "REQRESP" ], "lname": "from", "ns": "m2m", - "type": "anyURI", + "type": "ID", "car": "01", "oc": "M", "ou": "NP", @@ -274,7 +274,7 @@ "ctype": "m2m:contentRef", "lname": "name", "ns": "m2m", - "type": "string", // xs:NCName + "type": "ncname", "car": "1" }, { @@ -282,7 +282,7 @@ "ctype": "m2m:attribute", "lname": "name", "ns": "m2m", - "type": "string", + "type": "ncname", "car": "1" } ], @@ -360,17 +360,17 @@ { "rtypes": [ "COMPLEX" ], "ctype": "m2m:filterCriteria", - "lname": "accessControlOperations", + "lname": "operations", "ns": "m2m", - "type": "nonNegInteger", // m2m:accessControlOperations. Not just an enum, but a bitmap + "type": "nonNegInteger", "car": "01" }, { "rtypes": [ "COMPLEX" ], "ctype": "m2m:operationMonitor", - "lname": "accessControlOperations", + "lname": "operations", "ns": "m2m", - "type": "nonNegInteger", // m2m:accessControlOperations. Not just an enum, but a bitmap + "type": "nonNegInteger", "car": "01" } ], @@ -535,7 +535,15 @@ "ns": "m2m", "type": "absRelTimestamp", "car": "01" + }, + { + "rtypes": [ "REQRESP" ], + "lname": "resultExpirationTimestamp", + "ns": "m2m", + "type": "absRelTimestamp", + "car": "01" } + ], "rt": [ { @@ -634,7 +642,7 @@ "ctype": "m2m:operationResult", "lname": "to", "ns": "m2m", - "type": "anyURI", + "type": "ID", "car": "01" }, { @@ -642,14 +650,14 @@ "ctype": "m2m:requestPrimitive", "lname": "to", "ns": "m2m", - "type": "anyURI", + "type": "ID", "car": "1" }, { "rtypes": [ "REQRESP" ], "lname": "to", "ns": "m2m", - "type": "string", + "type": "ID", "car": "1", "oc": "M", "ou": "NP", @@ -941,7 +949,7 @@ "lname": "accessControlAttributes", "ns": "m2m", "type": "list", // m2m:attributeList - "ltype": "string", + "ltype": "ncname", "car": "01" } ], @@ -956,15 +964,15 @@ "ctype": "m2m:actionInput", "lname": "contentString", "ns": "m2m", - "type": "string", // xs:NCName + "type": "ncname", "car": "01" } ], - "ri": [ + "rsid": [ { "rtypes": [ "COMPLEX" ], "ctype": "m2m:actionInput", - "lname": "resourceID", + "lname": "ID", "ns": "m2m", "type": "anyURI", "car": "01" @@ -1191,7 +1199,7 @@ "ctype": "m2m:evalCriteria", "lname": "subject", "ns": "m2m", - "type": "string", // TODO "type": "xs:NCName", + "type": "ncname", "car": "1" } ], @@ -1418,16 +1426,16 @@ "car": "01" } ], - "gq": [ - { - "rtypes": [ "COMPLEX" ], - "ctype": "m2m:filterCriteria", - "lname": "geoQuery", - "ns": "m2m", - "type": "m2m:geoQuery", - "car": "01" - } - ], + // "gq": [ + // { + // "rtypes": [ "COMPLEX" ], + // "ctype": "m2m:filterCriteria", + // "lname": "geoQuery", + // "ns": "m2m", + // "type": "m2m:geoQuery", + // "car": "01" + // } + // ], // @@ -1440,46 +1448,7 @@ "ctype": "m2m:geoCoordinates", "lname": "coordinates", "ns": "m2m", - "type": "list", // TODO m2m:listOfCoordinates -> list of list of floats or GeoJSON? 
- "ltype": "string", - "car": "1" - } - ], - - - // - // m2m:geoQuery - // - "gmty": [ - { - "rtypes": [ "COMPLEX" ], - "ctype": "m2m:geoQuery", - "lname": "geometryType", - "ns": "m2m", - "type": "enum", - "etype": "m2m:geometryType", - "car": "1" - } - ], - "geom": [ - { - "rtypes": [ "COMPLEX" ], - "ctype": "m2m:geoQuery", - "lname": "geometryType", - "ns": "m2m", - "type": "list", // TODO m2m:listOfCoordinates -> list of list of floats or GeoJSON? - "ltype": "string", - "car": "1" - } - ], - "gsf": [ - { - "rtypes": [ "COMPLEX" ], - "ctype": "m2m:geoQuery", - "lname": "geoSpatialFunction", - "ns": "m2m", - "type": "enum", - "etype": "m2m:geoSpatialFunctionType", + "type": "geoJsonCoordinate", "car": "1" } ], @@ -1596,7 +1565,7 @@ "lname": "tokenIDs", "ns": "m2m", "type": "list", - "ltype": "string", // m2m:tokenID TODO validate + "ltype": "ncname", "car": "01L" } ], @@ -1604,10 +1573,10 @@ { "rtypes": [ "COMPLEX" ], "ctype": "m2m:metaInformation", - "lname": "tokenIDs", + "lname": "localTokenIDs", "ns": "m2m", "type": "list", - "ltype": "string", // xs:NCName TODO validate + "ltype": "ncname", "car": "01L" } ], @@ -1680,7 +1649,7 @@ "lname": "ontologyMappingResources", "ns": "m2m", "type": "list", - "ltype": "string", // m2m:listOfM2MID + "ltype": "ID", // m2m:listOfM2MID "car": "01" } ], @@ -1738,7 +1707,7 @@ "ctype": "m2m:operationMonitor", "lname": "originator", "ns": "m2m", - "type": "string", // m2m:ID + "type": "ID", "car": "01" } ], @@ -1867,8 +1836,9 @@ "ctype": "m2m:scheduleEntries", "lname": "scheduleEntry", "ns": "m2m", - "type": "schedule", - "car": "01" + "type": "list", + "ltype": "schedule", + "car": "1LN" } ], diff --git a/init/configurations.docmd b/init/configurations.docmd index 42807d42..a3c42cf8 100644 --- a/init/configurations.docmd +++ b/init/configurations.docmd @@ -735,7 +735,7 @@ The default value is `False`. This setting specifies the network interface on which the CSE's HTTP server is listening. Use `0.0.0.0` to listen on all available interfaces. -The default value is `127.0.0.1`. +The default value is `0.0.0.0`. @@ -850,6 +850,87 @@ The default value is `False`. +# http.security.enableBasicAuth + +This setting enables or disables the CSE's HTTP server's basic authentication support. +If enabled, the CSE's HTTP server will only accept incoming connections from clients that provide a valid username and password in the HTTP *Authorization* header. + +Can be enabled together with token authentication. + +The default value is `False`. + + + +# http.security.enableTokenAuth + +This setting enables or disables the CSE's HTTP server's token authentication support. +If enabled, the CSE's HTTP server will only accept incoming connections from clients that provide a valid token in the HTTP *Authorization* header. + +Can be enabled together with basic authentication. + +The default value is `False`. + + + +# http.security.basicAuthFile + +This setting specifies the path to the CSE's HTTP server's basic authentication file. +The file must contain lines with the format "username:password". +Comments are lines starting with a #. + +The default value is `${basic.config:dataDirectory}/certs/http_basic_auth.txt`. + + + +# http.security.tokenAuthFile + +This setting specifies the path to the CSE's HTTP server's token authentication file. +The file must contain lines with the format "token". +Comments are lines starting with a #. + +The default value is `${basic.config:dataDirectory}/certs/http_token_auth.txt`. 
+ + + +# http.wsgi + +This section contains settings that control the CSE's HTTP server's WSGI support. + +The *Web Server Gateway Interface* is a simple calling convention for web servers to forward requests to +web applications. It is intended to be used together with a reverse proxy server or API gateway, for example *nginx*. + +Note, that the CSE's HTTP server's WSGI implementation does not support TLS. It is intended for use in +a local network and behind a secure gateway only. + + + +# http.wsgi.enable + +This setting enables or disables the CSE's HTTP server's WSGI support. + +The default value is `False`. + + + +# http.wsgi.connectionLimit + +This setting specifies the number of possible parallel connections that can be accepted by the WSGI server. +One connection uses one system file descriptor. + +The default value is `100`. + + + +# http.wsgi.threadPoolSize + +This setting specifies the number of threads used to process requests. + +This number should be of similar size as the *connectionLimit* setting. + +The default value is `100`. + + + # logging This section contains settings that control the CSE's logging behavior. @@ -908,6 +989,16 @@ The default value is `debug`. +# logging.maxLogMessageLength + +This setting specifies the maximum length of a log message. Longer messages will be truncated. + +A value of 0 means no truncation. + +The default value is `1000` characters. + + + # logging.path This setting specifies the path to the CSE's log files. @@ -980,7 +1071,7 @@ The default value is `60 seconds`. This setting specifies the network interface on which the CSE's MQTT client is binding to. Use `0.0.0.0` to listen on all available interfaces. -The default value is `127.0.0.1`. +The default value is `0.0.0.0`. @@ -1156,6 +1247,45 @@ This setting specifies the CSE's default for the CNT's *mbs* (maxByteSize) attri The default value is `10000 bytes`. +# resource.grp + +This section specifies the CSE's defaults for GRP (Group) resources and the GroupManager service. + +Settings in this section are listed under the `[resource.grp]` section. + + +# resource.grp.resultExpirationTime + +Set the time for the GroupManager for aggregating the results of a group request before interrupting. The format is the time in ms. + +A value of 0 ms means no timeout. + +The default is `0` ms. + + +# resource.lcp + +This section specifies the CSE's defaults for LCP (LocationPolicy) resources. + +Settings in this section are listed under the `[resource.lcp]` section. + + + +# resource.lcp.mni + +This setting specifies the value of the *mni* (maxNrOfInstances) attribute for the "locations" CNT resource that is created by the CSE when the LCP is created. + +The default value is `10`. + + + +# resource.lcp.mbs + +This setting specifies the value of the *mbs* (maxByteSize) attribute for the "locations" CNT resource that is created by the CSE when the LCP is created. + +The default value is `10000 bytes`. + + # resource.req @@ -1273,6 +1403,14 @@ The default value is `2.0 seconds`. +# scripting.maxRuntime + +This setting specifies the maximum runtime, in seconds, for a script execution. + +The default value is `60 seconds`. `0.0 seconds` means no timeout. + + + # scripting.scriptDirectories This setting specifies a comma-separated list of directories that contain additional CSE's script files. 
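The WSGI settings documented above map naturally onto a production WSGI server such as `waitress`, which this change adds to the dependencies. The following is a minimal, self-contained sketch with a placeholder Flask app standing in for the CSE's own application object; the route and port are illustrative only.

```python
# Serve a (placeholder) Flask app through waitress, mirroring the
# http.wsgi.threadPoolSize and http.wsgi.connectionLimit settings.
from flask import Flask
from waitress import serve

app = Flask(__name__)          # stand-in for the CSE's own Flask application

@app.get('/')
def index() -> str:
    return 'placeholder WSGI app\n'

serve(
    app,
    listen='127.0.0.1:8080',   # no TLS on the WSGI binding; keep it behind a reverse proxy
    threads=100,               # same role as http.wsgi.threadPoolSize
    connection_limit=100,      # same role as http.wsgi.connectionLimit
)
```

Keeping `threads` close to `connection_limit`, as the documentation suggests, avoids accepted connections waiting for a free worker thread.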
diff --git a/init/demoLightbulb/init.as b/init/demoLightbulb/init.as index 1b9ceb60..230bd89c 100644 --- a/init/demoLightbulb/init.as +++ b/init/demoLightbulb/init.as @@ -59,7 +59,7 @@ the *lightswitch*.") "pv": { "acr": [ { ;; Allow CDemoLightbulb only to retrieve - "acor": [ "CDemoLightswitch" ], + "acor": [ "CDemoLightswitch"], "acop": 16 ;; NOTIFY }, { ;; Allow CDemoLightswitch all access diff --git a/init/enumTypesPolicies.ep b/init/enumTypesPolicies.ep index 306a0005..bdc25acb 100644 --- a/init/enumTypesPolicies.ep +++ b/init/enumTypesPolicies.ep @@ -7,101 +7,413 @@ { "m2m:batteryStatus" : { - "evalues": [ "1..7" ] + "1": "Normal", + "2": "Charging", + "3": "Charging complete", + "4": "Damaged", + "5": "Low battery", + "6": "Not installed", + "7": "Unknown" }, "m2m:contentStatus" : { - "evalues": [ 1, 2 ] + "1": "Partial content", + "2": "Full content" }, "m2m:evalCriteriaOperator" : { - "evalues": [ "1..6" ] + "1": "equal", + "2": "not equal", + "3": "greater than", + "4": "less than", + "5": "greater than or equal", + "6": "less than or equal" }, "m2m:evalMode" : { - "evalues": [ "0..3" ] + "0": "off", + "1": "once", + "2": "periodic", + "3": "continuous" }, "m2m:eventCat" : { // m2m:stdEventCat + user defined range - "evalues" : [ "2..4", "100..999"] + "2": "Immediate", + "3": "Best Effort", + "4": "Latest", + "100..999": "User defined" }, // EXPERIMENTAL "m2m:eventEvaluationMode" : { - "evalues" : [ "1..5" ] + "1": "All events present", + "2": "All or some events present", + "3": "All or some events missing", + "4": "All events missing", + "5": "Some events missing" }, "m2m:filterOperation" : { - "evalues" : [ "1..3" ] + "1": "Logical AND", + "2": "Logical OR", + "3": "Logical XOR" }, "m2m:contentFilterSyntax" : { - "evalues" : [ 1 ] + "1": "JSONPath Syntax" }, "m2m:desIdResType" : { - "evalues": [ 1, 2 ] + "1": "Structured", + "2": "Unstructured" }, "m2m:logTypeId" : { - "evalues": [ "1..5" ] + "1": "System", + "2": "Security", + "3": "Event", + "4": "Trace", + "5": "Panic" }, "m2m:filterUsage" : { - "evalues" : [ "1..4" ] + "1": "Discovery", + "2": "Conditional Operation", + "3": "IPE On-demand Discovery" + }, + "m2m:geofenceEventCriteria" : { + "1": "Entering", + "2": "Leaving", + "3": "Inside", + "4": "Outside" }, "m2m:geometryType" : { - "evalues" : [ "1..6" ] + "1": "Point", + "2": "LineString", + "3": "Polygon", + "4": "MultiPoint", + "5": "MultiLineString", + "6": "MultiPolygon" }, "m2m:geoSpatialFunctionType" : { - "evalues": [ "1..3" ] + "1": "Within", + "2": "Contains", + "3": "Intersects" + }, + "m2m:locationInformationType" : { + "1": "Position fix", + "2": "Geofence event" + }, + "m2m:locationSource" : { + "1": "Network based", + "2": "Device based", + "3": "User based" + }, + "m2m:locationUpdateEventCriteria": { + "0": "Location_Change" }, "m2m:logStatus" : { - "evalues": [ "1..5" ] + "1": "Started", + "2": "Stopped", + "3": "Unknown", + "4": "Not present", + "5": "Error" }, "m2m:mgmtDefinition" : { // Adapt to supported MgmtObj types - "evalues" : [ "1001..1010", 1021, 1023, 1028 ] + "0": "Self-defined", + "1001": "firmware", + "1002": "software", + "1003": "memory", + "1004": "areaNwkInfo", + "1005": "areaNwkDeviceInfo", + "1006": "battery", + "1007": "deviceInfo", + "1008": "deviceCapability", + "1009": "reboot", + "1010": "eventLog", + "1011": "cmdhPolicy", + "1012": "activeCmdhPolicy", + "1013": "cmdhDefaults", + "1014": "cmdhDefEcValue", + "1015": "cmdhEcDefParamValues", + "1016": "cmdhLimits", + "1017": "cmdhNetworkAccessRules", + "1018": "cmdhNwAccessRule", 
+ "1019": "cmdhBuffer", + "1020": "registration", + "1021": "dataCollection", + "1022": "authenticationProfile", + "1023": "myCertFileCred", + "1024": "trustAnchorCred", + "1025": "MAFClientRegCfg", + "1026": "MEFClientRegCfg", + "1027": "OAuth2Authentication", + "1028": "wifiClient" }, "m2m:multicastCapability" : { - "evalues" : [ 1, 2 ] + "1": "MBMS", + "2": "IP" + }, + "m2m:notificationContentType" : { + "1": "m2m:", + "2": "m2m:", + "3": "m2m:URI", + "4": "m2m:triggerPayload", + "5": "m2m:timeSeriesNotification" }, "m2m:notificationEventType" : { - "evalues": [ "1..8", 9, 10 ] // EXPERIMENTAL 9, 10 experimental + "1": "Update of Resource", + "2": "Delete of Resource", + "3": "Create of Direct Child Resource", + "4": "Delete of Direct Child Resource", + "5": "Retrieve of Container Resource with No Child Resource", + "6": "Trigger Received for AE Resource", + "7": "Blocking Update", + "8": "Report on Missing Data Points", + + "9": "blockingRetrieve (EXPERIMENTAL)", // EXPERIMENTAL + "10": "blockingRetrieveDirectChild (EXPERIMENTAL)" // EXPERIMENTAL }, "m2m:operation" : { - "evalues": [ "1..5" ] + "1": "Create", + "2": "Retrieve", + "3": "Update", + "4": "Delete", + "5": "Notify" }, "m2m:responseType" : { - "evalues" : [ "1..5" ] + "1": "Non-blocking Request Synch", + "2": "Non-blocking Request Asynch", + "3": "Blocking Request", + "4": "FlexBlocking", + "5": "No Response" }, "m2m:resourceType" : { // Adapt to supported resource types - "evalues" : [ "1..5", 9, "13..17", 23, 24, "28..30", 48, 58, 60, 65, 66, - "10001..10005", 10009, "10013..10014", 10016, 10021, "10028..10030", 10060, 10065, 10066 ] + "1": "accessControlPolicy", + "2": "AE", + "3": "container", + "4": "contentInstance", + "5": "CSEBase", + "9": "group", + "10": "locationPolicy", + "13": "mgmtObj", + "14": "node", + "15": "pollingChannel", + "16": "remoteCSE", + "17": "request", + "18": "schedule", + "23": "subscription", + "24": "semanticDescriptor", + "28": "flexContainer", + "29": "timeSeries", + "30": "timeSeriesInstance", + "48": "crossResourceSubscription", + "58": "flexContainerInstance", + "60": "timeSyncBeacon", + "65": "action", + "66": "dependency", + + "10001": "accessControlPolicyAnnc", + "10002": "AEAnnc", + "10003": "containerAnnc", + "10004": "contentInstanceAnnc", + "10005": "CSEBaseAnnc", + "10009": "groupAnnc", + "10010": "locationPolicyAnnc", + "10013": "mgmtObjAnnc", + "10014": "nodeAnnc", + "10016": "remoteCSEAnnc", + "10018": "scheduleAnnc", + "10024": "semanticDescriptorAnnc", + "10028": "flexContainerAnnc", + "10029": "timeSeriesAnnc", + "10030": "timeSeriesInstanceAnnc", + "10060": "timeSyncBeaconAnnc", + "10065": "actionAnnc", + "10066": "dependencyAnnc" }, "m2m:responseStatusCode" : { - "evalues": [ "1000..1002", - "2000..2002", 2004, - 4000, 4001, 4004, 4005, 4008, 4015, "4101..4133", "4135..4143", - 5000, 5001, 5103, "5105..5107", "5203..5222", "5230..5232", - 6003, 6005, 6010, "6020..6026", "6028..6034"] + "1000": "ACCEPTED", + "1001": "ACCEPTED for nonBlockingRequestSynch", + "1002": "ACCEPTED for nonBlockingRequestAsynch", + + "2000": "OK", + "2001": "CREATED", + "2002": "DELETED", + "2004": "UPDATED", + + "4000": "Bad Request", + "4001": "Release Version Not Supported", + "4004": "Not Found", + "4005": "Operation Not Allowed", + "4008": "Request Timeout", + "4015": "Unsupported Media Type", + "4101": "Subscription Creator Has No Privilege", + "4102": "Contents Unacceptable", + "4103": "Originator Has No Privilege", + "4104": "Group Request Identifier Exists", + "4105": "Conflict", + 
"4106": "Originator Has Not Registered", + "4107": "Security Association Required", + "4108": "Invalid Child Resource Type", + "4109": "No Members", + "4110": "Group Member Type Inconsistent", + "4111": "ESPRIM Unsupported Option", + "4112": "ESPRIM Unknown Key ID", + "4113": "ESPRIM Unknown Orig RAND ID", + "4114": "ESPRIM Unknown Recv RAND ID", + "4115": "ESPRIM Bad MAC", + "4116": "ESPRIM Impersonation Error", + "4117": "Originator Has Already Registered", + "4118": "Ontology Not Available", + "4119": "Linked Semantics Not Available", + "4120": "Invalid Semantics", + "4121": "Mashup Member Not Found", + "4122": "Invalid Trigger Purpose", + "4123": "Illegal Transaction State Transition Attempted", + "4124": "Blocking Subscription Already Exists", + "4125": "Specialization Schema Not Found", + "4126": "App Rule Validation Failed", + "4127": "Operation Denied By Remote Entity", + "4128": "Service Subscription Not Established", + "4130": "Discovery Limit Exceeded", + "4131": "Ontology Mapping Algorithm Not Available", + "4132": "Ontology Mapping Policy Not Matched", + "4133": "Ontology Mapping Not Available", + "4135": "Bad Fact Inputs For Reasoning", + "4136": "Bad Rule Inputs For Reasoning", + "4137": "Discovery Limit Exceeded", + "4138": "Primitive Profile Not Accessible", + "4139": "Primitive Profile Bad Request", + "4140": "Unauthorized User", + "4141": "Service Subscription Limits Exceeded", + "4142": "Invalid Process Configuration", + "4143": "Invalid SPARQL Query", + + "5000": "Internal Server Error", + "5001": "Not Implemented", + "5103": "Target Not Reachable", + "5105": "Receiver Has No Privilege", + "5106": "Already Exists", + "5107": "Remote Entity Not Reachable", + "5203": "Target Not Subscribable", + "5204": "Subscription Verification Initiation Failed", + "5205": "Subscription Host Has No Privilege", + "5206": "Non Blocking Synch Request Not Supported", + "5207": "Not Acceptable", + "5208": "Discovery Denied By IPE", + "5209": "Group Members Not Responded", + "5210": "ESPRIM Decryption Error", + "5211": "ESPRIM Encryption Error", + "5212": "SPARQL Update Error", + "5214": "Target Has No Session Capability", + "5215": "Session Is Online", + "5216": "Join Multicast Group Failed", + "5217": "Leave Multicast Group Failed", + "5218": "Triggering Disabled For Recipient", + "5219": "Unable To Replace Request", + "5220": "Unable To Recall Request", + "5221": "Cross Resource Operation Failure", + "5222": "Transaction Processing Is Incomplete", + "5230": "Ontology Mapping Algorithm Failed", + "5231": "Ontology Conversion Failed", + "5232": "Reasoning Processing Failed", + + "6003": "External Object Not Reachable", + "6005": "External Object Not Found", + "6010": "Max Number Of Member Exceeded", + "6020": "Mgmt Session Cannot Be Established", + "6021": "Mgmt Session Establishment Timeout", + "6022": "Invalid Cmdtype", + "6023": "Invalid Arguments", + "6024": "Insufficient Arguments", + "6025": "Mgmt Conversion Error", + "6026": "Mgmt Cancellation Failed", + "6028": "Already Complete", + "6029": "Mgmt Command Not Cancellable", + "6030": "External Object Not Reachable Before RQET Timeout", + "6031": "External Object Not Reachable Before OET Timeout", + "6033": "Network QoS Configuration Error", + "6034": "Requested Activity Pattern Not Permitted" }, "m2m:resultContent" : { - "evalues": [ "0..12" ] + "0": "Nothing", + "1": "Attributes", + "2": "Hierarchical address", + "3": "Hierarchical address and attributes", + "4": "Attributes and child resources", + "5": "Attributes and child 
resource references", + "6": "Child resource references", + "7": "Original resource", + "8": "Child resources", + "9": "Modified attributes", + "10": "Semantic content", + "11": "Semantic content and child resources", + "12": "Permissions" }, "m2m:semanticFormat" : { - "evalues" : [ "1..7" ] + "1": "IRI", + "2": "Functional-style", + "3": "OWL/XML", + "4": "RDF/XML", + "5": "RDF/Turtle", + "6": "Manchester", + "7": "JSON-LD" }, "m2m:stationaryIndication" : { - "evalues" : [ 1, 2 ] + "1": "Stationary", + "2": "Mobile (Moving)" }, + "m2m:status" : { - "evalues" : [ "0..3" ] + "0": "Uninitialized", + "1": "Successful", + "2": "Failure", + "3": "In Process" }, "m2m:suid" : { - "evalues" : [ "10..15", "21..25", "32..35", "40..45" ] + "10": "A pre-provisioned symmetric key intended to be shared with a MEF", + "11": "A pre-provisioned symmetric key intended to be shared with a MAF", + "12": "A pre-provisioned symmetric key intended for use in a Security Associated Establishment Framework (SAEF)", + "13": "A pre-provisioned symmetric key intended for use in End-to-End Security of Primitives (ESPrim)", + "14": "A pre-provisioned symmetric key intended for use with authenticated encryption in the Encryption-only or Nested Sign-then-Encrypt End-to-End Security of Data (ESData) Data classes", + "15": "A pre-provisioned symmetric key intended for use in Signature-only ESData Security Class", + + "21": "A symmetric key, provisioned via a Remote Security Provisioning Framework (RSPF), and intended to be shared with a MAF", + "22": "A symmetric key, provisioned via a RSPF, and intended for use in a SAEF", + "23": "A symmetric key, provisioned via a RSPF, and intended for use in ESPrim", + "24": "A symmetric key, provisioned via a RSPF, and intended for use with authenticated encryption in the Encryption-only or Nested Sign-then-Encrypt ESData) Data classes", + "25": "A symmetric key, provisioned via a RSPF, and intended for use in Signature-only ESData Security Class", + + "32": "A MAF-distributed symmetric key intended for use in a SAEF", + "33": "A MAF-distributed symmetric key intended for use in ESPrim", + "34": "A MAF-distributed symmetric key intended for use with authenticated encryption in the Encryption-only or Nested Sign-then-Encrypt ESData Data classes", + "35": "A MAF-distributed symmetric key intended for use in Signature-only ESData Security Class", + + "40": "A certificate intended to be shared with a MEF", + "41": "A certificate intended to be shared with a MAF", + "42": "A certificate intended for use in a Security Associated Establishment Framework (SAEF)", + "43": "A certificate intended for use in End-to-End Security of Primitives (ESPrim)", + "44": "A certificate intended for use with authenticated encryption in the Encryption-only or Nested Sign-then-Encrypt End-to-End Security of Data (ESData) Data classes", + "45": "A certificate intended for use in Signature-only ESData Security Class" }, "m2m:timeWindowType" : { - "evalues" : [ 1, 2 ] + "1": "Periodic Window", + "2": "Sliding Window" }, "dcfg:wifiConnectionStatus" : { - "evalues" : [ "0..6" ] + "0": "Disconnected", + "1": "Connected", + "2": "Idle", + "3": "No SSID available", + "4": "Scan completed", + "5": "Failed", + "6": "Lost" }, "dcfg:wifiEncryptionType" : { - "evalues" : [ "1..8" ] + "1": "None", + "2": "WEP", + "3": "WPA Personal", + "4": "WPA2 Personal", + "5": "WPA3 Personal", + "6": "WPA Enterprise", + "7": "WPA2 Enterprise", + "8": "WPA3 Enterprise" } } + diff --git a/init/utReset.as b/init/system.scripts/utReset.as 
similarity index 67% rename from init/utReset.as rename to init/system.scripts/utReset.as index 2bd71dd0..df78f307 100644 --- a/init/utReset.as +++ b/init/system.scripts/utReset.as @@ -14,9 +14,12 @@ (print "Resetting CSE") +(if (runs-in-tui) + (tui-notify "Resetting CSE" "CSE Reset" "warning")) + (reset-cse) +(print "CSE Reset Complete") (if (runs-in-tui) - (print "[green3 b]CSE Reset Complete") - (print "CSE Reset Complete")) + (tui-notify "CSE Reset Complete" "CSE Reset" "warning")) diff --git a/init/utStatus.as b/init/system.scripts/utStatus.as similarity index 100% rename from init/utStatus.as rename to init/system.scripts/utStatus.as diff --git a/init/testCaseEnd.as b/init/testing.scripts/testCaseEnd.as similarity index 91% rename from init/testCaseEnd.as rename to init/testing.scripts/testCaseEnd.as index 406977c2..86866c3a 100644 --- a/init/testCaseEnd.as +++ b/init/testing.scripts/testCaseEnd.as @@ -12,6 +12,8 @@ (if (< argc 2) ( (log-error "Wrong number of arguments: testCaseEnd ") (quit-with-error))) +(if (== (get-loglevel) "OFF") + (quit)) ;; Print start line to the debug log (log-divider "End of ${(argv 1)}") diff --git a/init/testCaseStart.as b/init/testing.scripts/testCaseStart.as similarity index 83% rename from init/testCaseStart.as rename to init/testing.scripts/testCaseStart.as index 4f89b7d0..48648944 100644 --- a/init/testCaseStart.as +++ b/init/testing.scripts/testCaseStart.as @@ -13,6 +13,10 @@ ( (logError "Wrong number of arguments: testCaseStart ") (quit-with-error))) +(if (== (get-loglevel) "OFF") + (quit)) + ;; Print start line to the debug log (log-divider "Start of ${(argv 1)}") +;;(tui-notify (argv 1) "Running Test Case") diff --git a/init/testsDisableShortRequestExpiration.as b/init/testing.scripts/testsDisableShortRequestExpiration.as similarity index 100% rename from init/testsDisableShortRequestExpiration.as rename to init/testing.scripts/testsDisableShortRequestExpiration.as diff --git a/init/testsDisableShortResourceExpiration.as b/init/testing.scripts/testsDisableShortResourceExpiration.as similarity index 100% rename from init/testsDisableShortResourceExpiration.as rename to init/testing.scripts/testsDisableShortResourceExpiration.as diff --git a/init/testsEnableShortRequestExpiration.as b/init/testing.scripts/testsEnableShortRequestExpiration.as similarity index 100% rename from init/testsEnableShortRequestExpiration.as rename to init/testing.scripts/testsEnableShortRequestExpiration.as diff --git a/init/testsEnableShortResourceExpiration.as b/init/testing.scripts/testsEnableShortResourceExpiration.as similarity index 100% rename from init/testsEnableShortResourceExpiration.as rename to init/testing.scripts/testsEnableShortResourceExpiration.as diff --git a/init/utilities.scripts/utilAttributeInfo.as b/init/utilities.scripts/utilAttributeInfo.as new file mode 100644 index 00000000..2ed682d1 --- /dev/null +++ b/init/utilities.scripts/utilAttributeInfo.as @@ -0,0 +1,19 @@ +@name Attribute Info Search +@tuiTool +@category Utilities +@tuiInput Attribute +@tuiExecuteButton Search +@description ## Attribute Info Search\n\nThis tool provides fuzzy searches for an attribute name or short name, and prints out the attribute(s) information.\n\n*Note, that some scalar types are mapped to a more general type, such as "string"*. 
+ + +(clear-console) + +(if (!= argc 2) + ((print "[red]Add a single identifier without spaces") + (quit))) + +(dolist (attribute (cse-attribute-infos (argv 1))) + ((print "[dodger_blue2]attribute = " (nth 1 attribute)) + (print "[dark_orange]short name = " (nth 0 attribute)) + (print "type = " (nth 2 attribute) nl))) + diff --git a/mypy.ini b/mypy.ini index e6d844a1..ca43e87f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,5 +1,5 @@ [mypy] -python_version = 3.8 +python_version = 3.10 #mypy_path = acme files = acme/__main__.py,tests/*.py,tools/notificationServer/notificationServer.py disallow_untyped_calls = true @@ -26,5 +26,8 @@ ignore_missing_imports = True [mypy-InquirerPy.*] ignore_missing_imports = True +[mypy-shapely.*] +ignore_missing_imports = True + [mypy-plotext.*] ignore_missing_imports = True diff --git a/requirements.txt b/requirements.txt index c6d2eaed..45c27719 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,25 +4,25 @@ # # pip-compile # -blinker==1.6.2 +blinker==1.6.3 # via flask -cbor2==5.4.6 +cbor2==5.5.0 # via ACME-oneM2M-CSE (setup.py) certifi==2023.7.22 # via requests -charset-normalizer==3.1.0 +charset-normalizer==3.3.0 # via requests -click==8.1.3 +click==8.1.7 # via flask -flask==2.3.2 +flask==3.0.0 # via # ACME-oneM2M-CSE (setup.py) # flask-cors -flask-cors==3.0.10 +flask-cors==4.0.0 # via ACME-oneM2M-CSE (setup.py) idna==3.4 # via requests -importlib-metadata==6.7.0 +importlib-metadata==6.8.0 # via textual inquirerpy==0.3.4 # via ACME-oneM2M-CSE (setup.py) @@ -36,7 +36,7 @@ jinja2==3.1.2 # via flask linkify-it-py==2.0.2 # via markdown-it-py -markdown-it-py[linkify,plugins]==2.2.0 +markdown-it-py[linkify,plugins]==3.0.0 # via # mdit-py-plugins # rich @@ -49,43 +49,55 @@ mdit-py-plugins==0.4.0 # via markdown-it-py mdurl==0.1.2 # via markdown-it-py +numpy==1.26.1 + # via shapely paho-mqtt==1.6.1 # via ACME-oneM2M-CSE (setup.py) pfzy==0.3.4 # via inquirerpy plotext==5.2.8 - # via ACME-oneM2M-CSE (setup.py) -prompt-toolkit==3.0.38 + # via + # ACME-oneM2M-CSE (setup.py) + # textual-plotext +prompt-toolkit==3.0.39 # via inquirerpy -pygments==2.15.1 +pygments==2.16.1 # via rich -pyparsing==3.1.0 +pyparsing==3.1.1 # via rdflib -rdflib==6.3.2 +python3-dtls==1.3.0 + # via ACME-oneM2M-CSE (setup.py) +rdflib==7.0.0 # via ACME-oneM2M-CSE (setup.py) requests==2.31.0 # via ACME-oneM2M-CSE (setup.py) -rich==13.4.2 +rich==13.6.0 # via # ACME-oneM2M-CSE (setup.py) # textual +shapely==2.0.2 + # via ACME-oneM2M-CSE (setup.py) six==1.16.0 + # via isodate +textual==0.40.0 # via - # flask-cors - # isodate -textual==0.28.1 + # ACME-oneM2M-CSE (setup.py) + # textual-plotext +textual-plotext==0.1.0 # via ACME-oneM2M-CSE (setup.py) tinydb==4.8.0 # via ACME-oneM2M-CSE (setup.py) -typing-extensions==4.6.3 +typing-extensions==4.8.0 # via textual uc-micro-py==1.0.2 # via linkify-it-py urllib3==2.0.7 # via requests -wcwidth==0.2.6 +waitress==2.1.2 + # via ACME-oneM2M-CSE (setup.py) +wcwidth==0.2.8 # via prompt-toolkit -werkzeug==2.3.6 +werkzeug==3.0.0 # via flask -zipp==3.15.0 +zipp==3.17.0 # via importlib-metadata diff --git a/setup.py b/setup.py index fc3da2f7..aa6aad76 100644 --- a/setup.py +++ b/setup.py @@ -34,11 +34,15 @@ 'isodate', 'paho-mqtt', 'plotext', + 'python3-dtls', 'rdflib', 'requests', 'rich', + 'shapely', 'textual', + 'textual-plotext', 'tinydb', + 'waitress', ], entry_points={ 'console_scripts': [ diff --git a/tests/config.py b/tests/config.py index ec71e3ee..2f1c91d2 100644 --- a/tests/config.py +++ b/tests/config.py @@ -9,26 +9,24 @@ BINDING = 'http' # possible values: 
http, https, mqtt -if BINDING == 'mqtt': - PROTOCOL = 'mqtt' - CONFIGPROTOCOL = 'http' - NOTIFICATIONPROTOCOL = 'http' - REMOTEPROTOCOL = 'http' - -elif BINDING == 'http': - PROTOCOL = 'http' - CONFIGPROTOCOL = 'http' - NOTIFICATIONPROTOCOL = 'http' - REMOTEPROTOCOL = 'http' - -elif BINDING == 'https': - PROTOCOL = 'https' - CONFIGPROTOCOL = 'https' - NOTIFICATIONPROTOCOL = 'http' - REMOTEPROTOCOL = 'http' - -else: - assert False, 'Supported values for BINDING are "mqtt", "http", and "https"' +match BINDING: + case 'mqtt': + PROTOCOL = 'mqtt' + CONFIGPROTOCOL = 'http' + NOTIFICATIONPROTOCOL = 'http' + REMOTEPROTOCOL = 'http' + case 'http': + PROTOCOL = 'http' + CONFIGPROTOCOL = 'http' + NOTIFICATIONPROTOCOL = 'http' + REMOTEPROTOCOL = 'http' + case 'https': + PROTOCOL = 'https' + CONFIGPROTOCOL = 'https' + NOTIFICATIONPROTOCOL = 'http' + REMOTEPROTOCOL = 'http' + case _: + assert False, 'Supported values for BINDING are "mqtt", "http", and "https"' # TODO ENCODING = @@ -75,6 +73,7 @@ MQTTREGRESPONSETOPIC= f'/oneM2M/reg_resp/{mqttClientID}{CSEID}/json' +############################################################################## # # OAuth2 authentication @@ -87,6 +86,21 @@ oauthClientSecret = '' +# +# HTTP Basic authentication +# + +doHttpBasicAuth = False +httpUserName = 'test' +httpPassword = 'testPassword' + +# +# HTTP Token authentication +# +doHttpTokenAuth = False +httpAuthToken = 'testToken' + + # # Remote CSE # For testing remote CSE registrations diff --git a/tests/init.py b/tests/init.py index d7dacf1b..f538df70 100755 --- a/tests/init.py +++ b/tests/init.py @@ -11,7 +11,7 @@ from typing import Any, Callable, Tuple, cast, Optional from urllib.parse import ParseResult, urlparse, parse_qs -import sys, io, atexit +import sys, io, atexit, base64 import unittest from rich.console import Console @@ -222,11 +222,13 @@ def isRaspberrypi() -> bool: crsRN = 'testCRS' csrRN = 'testCSR' deprRN = 'testDEPR' -grpRN = 'testGRP' fcntRN = 'testFCNT' +grpRN = 'testGRP' +lcpRN = 'testLCP' nodRN = 'testNOD' pchRN = 'testPCH' reqRN = 'testREQ' +schRN = 'testSCH' smdRN = 'testSMD' subRN = 'testSUB' tsRN = 'testTS' @@ -246,6 +248,7 @@ def isRaspberrypi() -> bool: csrURL = f'{cseURL}/{csrRN}' fcntURL = f'{aeURL}/{fcntRN}' grpURL = f'{aeURL}/{grpRN}' +lcpURL = f'{aeURL}/{lcpRN}' # under the nodURL = f'{cseURL}/{nodRN}' # under the pchURL = f'{aeURL}/{pchRN}' pcuURL = f'{pchURL}/pcu' @@ -335,34 +338,54 @@ def sendRequest(operation:Operation, url:str, originator:str, ty:ResourceTypes=N # return sendHttpRequest(requests.delete, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) # elif operation == Operation.NOTIFY: # return sendHttpRequest(requests.post, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) - if operation == Operation.CREATE: - return sendHttpRequest(httpSession.post, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) - elif operation == Operation.RETRIEVE: - return sendHttpRequest(httpSession.get, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) - elif operation == Operation.UPDATE: - return sendHttpRequest(httpSession.put, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) - elif operation == Operation.DELETE: - return sendHttpRequest(httpSession.delete, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) - elif operation == Operation.NOTIFY: - return 
sendHttpRequest(httpSession.post, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + match operation: + case Operation.CREATE: + return sendHttpRequest(requests.post, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + case Operation.RETRIEVE: + return sendHttpRequest(requests.get, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + case Operation.UPDATE: + return sendHttpRequest(requests.put, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + case Operation.DELETE: + return sendHttpRequest(requests.delete, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + case Operation.NOTIFY: + return sendHttpRequest(requests.post, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + elif url.startswith('mqtt'): - if operation == Operation.CREATE: - return sendMqttRequest(Operation.CREATE, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) - elif operation == Operation.RETRIEVE: - return sendMqttRequest(Operation.RETRIEVE, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) - elif operation == Operation.UPDATE: - return sendMqttRequest(Operation.UPDATE, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) - elif operation == Operation.DELETE: - return sendMqttRequest(Operation.DELETE, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) - elif operation == Operation.NOTIFY: - return sendMqttRequest(Operation.NOTIFY, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + match operation: + case Operation.CREATE: + return sendMqttRequest(Operation.CREATE, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + case Operation.RETRIEVE: + return sendMqttRequest(Operation.RETRIEVE, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + case Operation.UPDATE: + return sendMqttRequest(Operation.UPDATE, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + case Operation.DELETE: + return sendMqttRequest(Operation.DELETE, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + case Operation.NOTIFY: + return sendMqttRequest(Operation.NOTIFY, url=url, originator=originator, ty=ty, data=data, ct=ct, timeout=timeout, headers=headers) + else: print('ERROR') return None, 5103 +def addHttpAuthorizationHeader(headers:Parameters) -> Optional[Tuple[str, int]]: + global oauthToken + + if doOAuth: + if (token := OAuth.getOAuthToken(oauthServerUrl, oauthClientID, oauthClientSecret, oauthToken)) is None: + return 'error retrieving oauth token', 5103 + oauthToken = token + headers['Authorization'] = f'Bearer {oauthToken.token}' + elif doHttpBasicAuth: + _t = f'{httpUserName}:{httpPassword}' + headers['Authorization'] = f'Basic {base64.b64encode(_t.encode("utf-8")).decode("utf-8")}' + elif doHttpTokenAuth: + headers['Authorization'] = f'Bearer {httpAuthToken}' + return None + + def sendHttpRequest(method:Callable, url:str, originator:str, ty:ResourceTypes=None, data:JSON|str=None, ct:str=None, timeout:float=None, headers:Parameters=None) -> Tuple[STRING|JSON, int]: # type: ignore # TODO Constants - global oauthToken, httpSession + global 
httpSession # correct url url = RequestUtils.toHttpUrl(url) @@ -398,11 +421,19 @@ def sendHttpRequest(method:Callable, url:str, originator:str, ty:ResourceTypes=N hds.update(headers) # authentication - if doOAuth: - if (token := OAuth.getOAuthToken(oauthServerUrl, oauthClientID, oauthClientSecret, oauthToken)) is None: - return 'error retrieving oauth token', 5103 - oauthToken = token - hds['Authorization'] = f'Bearer {oauthToken.token}' + if (_r := addHttpAuthorizationHeader(hds)) is not None: + return _r + + # if doOAuth: + # if (token := OAuth.getOAuthToken(oauthServerUrl, oauthClientID, oauthClientSecret, oauthToken)) is None: + # return 'error retrieving oauth token', 5103 + # oauthToken = token + # hds['Authorization'] = f'Bearer {oauthToken.token}' + # elif doHttpBasicAuth: + # _t = f'{httpUserName}:{httpPassword}' + # hds['Authorization'] = f'Basic {base64.b64encode(_t.encode("utf-8")).decode("utf-8")}' + # elif doHttpTokenAuth: + # hds['Authorization'] = f'Bearer aRandomToken' # Verbose output if verboseRequests: @@ -657,7 +688,9 @@ def enableShortResourceExpirations() -> None: global _orgExpCheck, _maxExpiration, _tooLargeResourceExpirationDelta # Send UT request - resp = requests.post(UTURL, headers = { UTCMD: f'enableShortResourceExpiration {expirationCheckDelay}'}) + headers = { UTCMD: f'enableShortResourceExpiration {expirationCheckDelay}'} + addHttpAuthorizationHeader(headers) + resp = requests.post(UTURL, headers = headers) _maxExpiration = -1 _orgExpCheck = -1 if resp.status_code == 200: @@ -677,7 +710,9 @@ def disableShortResourceExpirations() -> None: global _orgExpCheck, _orgREQExpCheck if _orgExpCheck != -1: # Send UT request - resp = requests.post(UTURL, headers = { UTCMD: f'disableShortResourceExpiration'}) + headers = { UTCMD: f'disableShortResourceExpiration'} + addHttpAuthorizationHeader(headers) + resp = requests.post(UTURL, headers = headers) if resp.status_code == 200: _orgExpCheck = -1 _orgREQExpCheck = -1 @@ -709,7 +744,9 @@ def enableShortRequestExpirations() -> None: global _orgRequestExpirationDelta # Send UT request - resp = requests.post(UTURL, headers = { UTCMD: f'enableShortRequestExpiration {requestExpirationDelay}'}) + headers = { UTCMD: f'enableShortRequestExpiration {requestExpirationDelay}'} + addHttpAuthorizationHeader(headers) + resp = requests.post(UTURL, headers = headers) if resp.status_code == 200: if UTRSP in resp.headers: _orgRequestExpirationDelta = float(resp.headers[UTRSP]) @@ -721,7 +758,9 @@ def disableShortRequestExpirations() -> None: global _orgRequestExpirationDelta # Send UT request - resp = requests.post(UTURL, headers = { UTCMD: f'disableShortRequestExpiration'}) + headers = { UTCMD: f'disableShortRequestExpiration'} + addHttpAuthorizationHeader(headers) + resp = requests.post(UTURL, headers = headers) if resp.status_code == 200: _orgRequestExpirationDelta = -1.0 @@ -742,7 +781,9 @@ def testCaseStart(name:str) -> None: name: Name of the test case. """ if UPPERTESTERENABLED: - requests.post(UTURL, headers = { UTCMD: f'testCaseStart {name}'}) + headers = { UTCMD: f'testCaseStart {name}'} + addHttpAuthorizationHeader(headers) + requests.post(UTURL, headers = headers) if verboseRequests: console.print('') ln = '=' * int((console.width - 11 - len(name)) / 2) @@ -757,7 +798,9 @@ def testCaseEnd(name:str) -> None: name: Name of the test case. 
""" if UPPERTESTERENABLED: - requests.post(UTURL, headers = { UTCMD: f'testCaseEnd {name}'}) + headers = { UTCMD: f'testCaseEnd {name}'} + addHttpAuthorizationHeader(headers) + requests.post(UTURL, headers = headers) if verboseRequests: console.print('') ln = '=' * int((console.width - 9 - len(name)) / 2) @@ -799,12 +842,11 @@ def do_POST(self) -> None: contentType = '' if (val := self.headers.get('Content-Type')) is not None: contentType = val.lower() - if contentType in [ 'application/json', 'application/vnd.onem2m-res+json' ]: - setLastNotification(decoded_data := json.loads(post_data.decode('utf-8'))) - elif contentType in [ 'application/cbor', 'application/vnd.onem2m-res+cbor' ]: - setLastNotification(decoded_data := cbor2.loads(post_data)) - # else: - # setLastNotification(post_data.decode('utf-8')) + match contentType: + case 'application/json' | 'application/vnd.onem2m-res+json': + setLastNotification(decoded_data := json.loads(post_data.decode('utf-8'))) + case 'application/cbor' | 'application/vnd.onem2m-res+cbor': + setLastNotification(decoded_data := cbor2.loads(post_data)) setLastNotificationHeaders(dict(self.headers)) # make a dict out of the headers # make a dict out of the query arguments @@ -1109,10 +1151,20 @@ def findXPath(dct:JSON, key:str, default:Any=None) -> Any: if UPPERTESTERENABLED: try: - if requests.post(UTURL, headers = { UTCMD: f'Status'}).status_code != 200: - console.print('[red]Upper Tester Interface not enabeled in CSE') - console.print('Enable with configuration setting: "\[http]:enableUpperTesterEndpoint=True"') - quit(-1) + headers = { UTCMD: f'Status'} + addHttpAuthorizationHeader(headers) + response = requests.post(UTURL, headers = headers) + match response.status_code: + case 200: + pass + case 401: + console.print('[red]CSE requires authorization') + console.print('Add authorization settings to the test suite configuration file') + quit(-1) + case _: + console.print('[red]Upper Tester Interface not enabeled in CSE') + console.print('Enable with configuration setting: "\[http]:enableUpperTesterEndpoint=True"') + quit(-1) except (ConnectionRefusedError, requests.exceptions.ConnectionError): console.print('[red]Connection to CSE not possible[/red]\nIs it running?') quit(-1) diff --git a/tests/testCRS.py b/tests/testCRS.py index 2e56f7e0..340327c5 100644 --- a/tests/testCRS.py +++ b/tests/testCRS.py @@ -336,9 +336,7 @@ def test_createCRSwithRratSlidingStatsEnabled(self) -> None: self.assertEqual(rrats[1], self._testSubscriptionForCnt(cntRN2)) self._testSubscriptionForCnt(cntRN3, False) self.assertTrue(findXPath(TestCRS.crs, 'm2m:crs/nse')) - self.assertIsNotNone(findXPath(TestCRS.crs, 'm2m:crs/nsi')) - self.assertEqual(len(findXPath(TestCRS.crs, 'm2m:crs/nsi')), 1) - self.assertEqual(findXPath(TestCRS.crs, 'm2m:crs/nsi/{0}/tg'), TestCRS.originator) + self.assertIsNone(findXPath(TestCRS.crs, 'm2m:crs/nsi')) @unittest.skipIf(noCSE, 'No CSEBase') @@ -770,7 +768,7 @@ def test_updateCRSPeriodicWindowSize(self) -> None: self.assertIsNone(notification := getLastNotification()) # wait second half - testSleep(crsTimeWindowSize) + testSleep(crsTimeWindowSize * 1.2) self.assertIsNotNone(notification := getLastNotification()) self.assertIsNotNone(findXPath(notification, 'm2m:sgn')) self.assertEqual(findXPath(notification, 'm2m:sgn/sur'), toSPRelative(findXPath(self.crs, 'm2m:crs/ri'))) @@ -840,15 +838,13 @@ def test_updateCRSSlidingWindowSize(self) -> None: @unittest.skipIf(noCSE, 'No CSEBase') - def test_retrieveCRSwithNSE(self) -> None: - """ RETRIEVE """ + def 
test_retrieveCRSwithNSENSINone(self) -> None: + """ RETRIEVE with NSE set to True and no NSI""" TestCRS.crs, rsc = RETRIEVE(crsURL, TestCRS.originator) self.assertEqual(rsc, RC.OK, TestCRS.crs) self.assertTrue(findXPath(TestCRS.crs, 'm2m:crs/nse')) - self.assertIsNotNone(findXPath(TestCRS.crs, 'm2m:crs/nsi')) - self.assertEqual(len(findXPath(TestCRS.crs, 'm2m:crs/nsi')), 1) - self.assertEqual(findXPath(TestCRS.crs, 'm2m:crs/nsi/{0}/tg'), TestCRS.originator) + self.assertIsNone(findXPath(TestCRS.crs, 'm2m:crs/nsi')) @unittest.skipIf(noCSE, 'No CSEBase') @@ -881,11 +877,7 @@ def test_testEmptyNsi(self) -> None: TestCRS.crs, rsc = RETRIEVE(crsURL, TestCRS.originator) self.assertEqual(rsc, RC.OK, TestCRS.crs) self.assertTrue(findXPath(TestCRS.crs, 'm2m:crs/nse')) - self.assertEqual(len( nsi := findXPath(TestCRS.crs, 'm2m:crs/nsi')), 1, TestCRS.crs) - self.assertEqual(findXPath(nsi, '{0}/tg'), TestCRS.originator, TestCRS.crs) - self.assertEqual(findXPath(nsi, '{0}/rqs'), 0) - self.assertEqual(findXPath(nsi, '{0}/rsr'), 0) - self.assertEqual(findXPath(nsi, '{0}/noec'), 0) + self.assertIsNone(findXPath(TestCRS.crs, 'm2m:crs/nsi'), TestCRS.crs) @unittest.skipIf(noCSE, 'No CSEBase') @@ -898,12 +890,8 @@ def test_updateCRSwithEnableNSE(self) -> None: TestCRS.crs, rsc = UPDATE(crsURL, TestCRS.originator, dct) self.assertEqual(rsc, RC.UPDATED, TestCRS.crs) self.assertIsNotNone(findXPath(TestCRS.crs, 'm2m:crs/nse'), TestCRS.crs) - self.assertIsNotNone(findXPath(TestCRS.crs, 'm2m:crs/nsi'), TestCRS.crs) - self.assertEqual(len( nsi := findXPath(TestCRS.crs, 'm2m:crs/nsi')), 1, TestCRS.crs) - self.assertEqual(findXPath(nsi, '{0}/tg'), TestCRS.originator, TestCRS.crs) - self.assertEqual(findXPath(nsi, '{0}/rqs'), 0) - self.assertEqual(findXPath(nsi, '{0}/rsr'), 0) - self.assertEqual(findXPath(nsi, '{0}/noec'), 0) + # nsi must be empty + self.assertIsNone(findXPath(TestCRS.crs, 'm2m:crs/nsi'), TestCRS.crs) @unittest.skipIf(noCSE, 'No CSEBase') @@ -941,11 +929,9 @@ def test_updateCRSwithNseTrue(self) -> None: TestCRS.crs, rsc = UPDATE(crsURL, TestCRS.originator, dct) self.assertEqual(rsc, RC.UPDATED, TestCRS.crs) - self.assertTrue(findXPath(TestCRS.crs, 'm2m:crs/nse')) - self.assertEqual(len(findXPath(TestCRS.crs, 'm2m:crs/nsi')), 1, TestCRS.crs) - self.assertEqual(findXPath(TestCRS.crs, 'm2m:crs/nsi/{0}/rqs'), 0, TestCRS.crs) - self.assertEqual(findXPath(TestCRS.crs, 'm2m:crs/nsi/{0}/rsr'), 0, TestCRS.crs) - self.assertEqual(findXPath(TestCRS.crs, 'm2m:crs/nsi/{0}/noec'), 0, TestCRS.crs) + self.assertTrue(findXPath(TestCRS.crs, 'm2m:crs/nse', TestCRS.crs)) + # nsi must be empty + self.assertIsNone(findXPath(TestCRS.crs, 'm2m:sub/nsi'), TestCRS.crs) ######################################################################### @@ -1433,7 +1419,6 @@ def test_createCRSPeriodicAllSomeEventsMissingSome(self) -> None: @unittest.skipIf(noCSE, 'No CSEBase') def test_createCRSPeriodicAllSomeEventsMissingNone(self) -> None: """ CREATE with rrat, one encs, periodic window, all or some events missing, no event""" - clearLastNotification() dct = { 'm2m:crs' : { 'rn' : crsRN, 'nu' : [ '/id-in/'+TestCRS.originator ], @@ -1458,6 +1443,7 @@ def test_createCRSPeriodicAllSomeEventsMissingNone(self) -> None: # Create NO CIN # wait and check notification + clearLastNotification() testSleep(crsTimeWindowSize + 1.0) self.assertIsNotNone(notification := getLastNotification()) self.assertIsNotNone(findXPath(notification, 'm2m:sgn')) @@ -2048,7 +2034,7 @@ def run(testFailFast:bool) -> Tuple[int, int, int, float]: # Test Notification 
Stats addTest(suite, TestCRS('test_createCRSwithRratSlidingStatsEnabled')) # Sliding - addTest(suite, TestCRS('test_retrieveCRSwithNSE')) + addTest(suite, TestCRS('test_retrieveCRSwithNSENSINone')) addTest(suite, TestCRS('test_createTwoNotificationOneNotification')) addTest(suite, TestCRS('test_deleteCRSwithRrat')) diff --git a/tests/testLCP.py b/tests/testLCP.py new file mode 100644 index 00000000..39d1c8fb --- /dev/null +++ b/tests/testLCP.py @@ -0,0 +1,383 @@ +# +# testLCP.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. See the LICENSE file for further details. +# +# Unit tests for LocationPolicy functionality +# + +import unittest, sys +if '..' not in sys.path: + sys.path.append('..') +from typing import Tuple +from acme.etc.Types import ResourceTypes as T, ResponseStatusCode as RC, TimeWindowType +from acme.etc.Types import NotificationEventType, NotificationEventType as NET +from init import * + + +pointInside = { + 'type' : 'Point', + 'coordinates' : [ 52.520817, 13.409446 ] +} + +pointInsideStr = json.dumps(pointInside) + +pointOutside = { + 'type' : 'Point', + 'coordinates' : [ 52.505033, 13.278189 ] +} +pointOutsideStr = json.dumps(pointOutside) + +targetPoligon = { + 'type' : 'Polygon', + 'coordinates' : [ + [ [52.522423, 13.409468], [52.520634, 13.412107], [52.518362, 13.407172], [52.520086, 13.404897] ] + ] +} +targetPoligonStr = json.dumps(targetPoligon) + +# TODO wrong poligon, wrong point + + +class TestLCP(unittest.TestCase): + + ae = None + aeRI = None + ae2 = None + nod = None + nodRI = None + crs = None + crsRI = None + + + originator = None + + @classmethod + @unittest.skipIf(noCSE, 'No CSEBase') + def setUpClass(cls) -> None: + testCaseStart('Setup TestLCP') + + # Start notification server + startNotificationServer() + + dct = { 'm2m:ae' : { + 'rn' : aeRN, + 'api' : APPID, + 'rr' : True, + 'srv' : [ RELEASEVERSION ] + }} + cls.ae, rsc = CREATE(cseURL, 'C', T.AE, dct) # AE to work under + assert rsc == RC.CREATED, 'cannot create parent AE' + cls.originator = findXPath(cls.ae, 'm2m:ae/aei') + cls.aeRI = findXPath(cls.ae, 'm2m:ae/ri') + + testCaseEnd('Setup TestLCP') + + + @classmethod + @unittest.skipIf(noCSE, 'No CSEBase') + def tearDownClass(cls) -> None: + if not isTearDownEnabled(): + stopNotificationServer() + return + testCaseStart('TearDown TestLCP') + DELETE(aeURL, ORIGINATOR) # Just delete the AE and everything below it. 
Ignore whether it exists or not + testCaseEnd('TearDown TestLCP') + + + def setUp(self) -> None: + testCaseStart(self._testMethodName) + + + def tearDown(self) -> None: + testCaseEnd(self._testMethodName) + + ######################################################################### + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPMissingLosFail(self) -> None: + """ CREATE invalid with missing los -> Fail""" + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'lou': [ 'PT5S' ], + 'lon': 'myLocationContainer' + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createMinimalLCP(self) -> None: + """ CREATE minimal with missing lou""" + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.CREATED, r) + self.assertIsNotNone(findXPath(r, 'm2m:lcp/lost')) + self.assertEqual(findXPath(r, 'm2m:lcp/lost'), '') + + _, rsc = DELETE(lcpURL, self.originator) + self.assertEqual(rsc, RC.DELETED) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPWithSameCNTRnFail(self) -> None: + """ CREATE with assigned container RN as self -> Fail """ + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'lon': lcpRN + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPWithLOS2LotFail(self) -> None: + """ CREATE with los=2 (device based) and set lot -> Fail """ + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'lot': '1234' # locationTargetID + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPWithLOS2AidFail(self) -> None: + """ CREATE with los=2 (device based) and set aid -> Fail """ + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'aid': '1234' # authID + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPWithLOS2LorFail(self) -> None: + """ CREATE with los=2 (device based) and set lor -> Fail """ + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'lor': '1234' # locationServer + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPWithLOS2RlklFail(self) -> None: + """ CREATE with los=2 (device based) and set rlkl -> Fail """ + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'rlkl': True # retrieveLastKnownLocation + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPWithLOS2LuecFail(self) -> None: + """ CREATE with los=2 (device based) and set luec -> Fail """ + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'luec': 0 # locationUpdateEventCriteria + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPWithWrongGtaFail(self) -> None: + """ CREATE with wrong gta -> Fail""" + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'gta': 'wrong' # geoTargetArea + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + 
self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPWithGta(self) -> None: + """ CREATE with gta """ + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'gta': targetPoligonStr # geoTargetArea + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.CREATED, r) + self.assertIsNotNone(findXPath(r, 'm2m:lcp/gta')) + + _, rsc = DELETE(lcpURL, self.originator) + self.assertEqual(rsc, RC.DELETED) + + + # + # Periodic tests + # + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createLCPWithLit2Lou0(self) -> None: + """ CREATE with lit = 2, lou = 0s""" + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'lit': 2, # locationInformationType = 2 (geo-fence) + 'lou': [ 'PT0S' ] # locationUpdatePeriod = 0s, + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.CREATED, r) + self.assertIsNotNone(findXPath(r, 'm2m:lcp/lost')) + self.assertEqual(findXPath(r, 'm2m:lcp/lost'), '') + + _, rsc = DELETE(lcpURL, self.originator) + self.assertEqual(rsc, RC.DELETED) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_testPeriodicUpdates(self) -> None: + """ CREATE with lit = 2, lou = 1s""" + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'lit': 2, # locationInformationType = 2 (geo-fence) + 'lou': [ 'PT1S' ], # locationUpdatePeriod = 1s + 'lon': cntRN, # containerName + 'gta': targetPoligonStr,# geoTargetArea + 'gec': 2 # geoEventCategory = 2 (leaving). Assuming that the initial location is inside the target area + + + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.CREATED, r) + self.assertIsNotNone(findXPath(r, 'm2m:lcp/lost')) + self.assertEqual(findXPath(r, 'm2m:lcp/lost'), '') + self.assertIsNotNone(findXPath(r, 'm2m:lcp/loi'), '') + + # Add a location ContentInstance + dct = { 'm2m:cin': { + 'con': pointOutsideStr + }} + r, rsc = CREATE(f'{aeURL}/{cntRN}', self.originator, T.CIN, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # Just wait a moment + testSleep(2) + + # Retrieve the latest location ContentInstance to check the event + r, rsc = RETRIEVE(f'{aeURL}/{cntRN}/la', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:cin/con')) + self.assertEqual(findXPath(r, 'm2m:cin/con'), '2', r) # leaving + + + _, rsc = DELETE(lcpURL, self.originator) + self.assertEqual(rsc, RC.DELETED) + + +# TODO add test: move from inside to inside -> no notification +# TODO add test: move from inside to outside -> notification +# TODO add test: move from outside to inside -> notification +# TODO add test: move from outside to outside -> no notification + +# TODO test with invalid location format + + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_testManualUpdates(self) -> None: + """ CREATE with lit = 2, lou = None """ + + dct = { 'm2m:lcp': { + 'rn': lcpRN, + 'los': 2, # device based + 'lit': 2, # locationInformationType = 2 (geo-fence) + 'lon': cntRN, # containerName + 'gta': targetPoligonStr,# geoTargetArea + 'gec': 2 # geoEventCategory = 2 (leaving). 
Assuming that the initial location is inside the target area + }} + r, rsc = CREATE(aeURL, self.originator, T.LCP, dct) + self.assertEqual(rsc, RC.CREATED, r) + self.assertIsNotNone(findXPath(r, 'm2m:lcp/lost')) + self.assertEqual(findXPath(r, 'm2m:lcp/lost'), '') + self.assertIsNotNone(findXPath(r, 'm2m:lcp/loi'), '') + + # Add a location ContentInstance + dct = { 'm2m:cin': { + 'con': pointOutsideStr + }} + r, rsc = CREATE(f'{aeURL}/{cntRN}', self.originator, T.CIN, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # Retrieve + r, rsc = RETRIEVE(f'{aeURL}/{cntRN}/la', self.originator) + self.assertEqual(rsc, RC.OK, r) + latest = findXPath(r, 'm2m:cin/con') + print(latest) + + + # Just wait a moment + testSleep(2) + + # TODO receive result? + + _, rsc = DELETE(lcpURL, self.originator) + self.assertEqual(rsc, RC.DELETED) + + + + ######################################################################### + + + +def run(testFailFast:bool) -> Tuple[int, int, int, float]: + suite = unittest.TestSuite() + + # basic tests + addTest(suite, TestLCP('test_createLCPMissingLosFail')) + addTest(suite, TestLCP('test_createMinimalLCP')) + addTest(suite, TestLCP('test_createLCPWithSameCNTRnFail')) + addTest(suite, TestLCP('test_createLCPWithLOS2LotFail')) + addTest(suite, TestLCP('test_createLCPWithLOS2AidFail')) + addTest(suite, TestLCP('test_createLCPWithLOS2LorFail')) + addTest(suite, TestLCP('test_createLCPWithLOS2RlklFail')) + addTest(suite, TestLCP('test_createLCPWithLOS2LuecFail')) + addTest(suite, TestLCP('test_createLCPWithWrongGtaFail')) + addTest(suite, TestLCP('test_createLCPWithGta')) + + # periodic tests + addTest(suite, TestLCP('test_createLCPWithLit2Lou0')) + addTest(suite, TestLCP('test_testPeriodicUpdates')) + addTest(suite, TestLCP('test_testManualUpdates')) + + + result = unittest.TextTestRunner(verbosity = testVerbosity, failfast = testFailFast).run(suite) + return result.testsRun, len(result.errors + result.failures), len(result.skipped), getSleepTimeCount() + + +if __name__ == '__main__': + r, errors, s, t = run(True) + sys.exit(errors) \ No newline at end of file diff --git a/tests/testLocation.py b/tests/testLocation.py new file mode 100644 index 00000000..3ed7d85e --- /dev/null +++ b/tests/testLocation.py @@ -0,0 +1,1602 @@ +# +# testLocation.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. See the LICENSE file for further details. +# +# Unit tests for geo-query functionality and queries +# + +import unittest, sys +if '..' not in sys.path: + sys.path.append('..') +from typing import Tuple +from acme.etc.Types import ResourceTypes as T, ResponseStatusCode as RC, TimeWindowType +from acme.etc.Types import NotificationEventType, NotificationEventType as NET +from init import * + + +class TestLocation(unittest.TestCase): + + ae = None + aeRI = None + + originator = None + + @classmethod + @unittest.skipIf(noCSE, 'No CSEBase') + def setUpClass(cls) -> None: + testCaseStart('Setup TestLocation') + + # Start notification server + #startNotificationServer() + + dct = { 'm2m:ae' : { + 'rn' : aeRN, + 'api' : APPID, + 'rr' : True, + 'srv' : [ RELEASEVERSION ] + }} + cls.ae, rsc = CREATE(cseURL, 'C', T.AE, dct) # AE to work under + assert rsc == RC.CREATED, 'cannot create parent AE' + + cls.originator = findXPath(cls.ae, 'm2m:ae/aei') + cls.aeRI = findXPath(cls.ae, 'm2m:ae/ri') + + dct = { 'm2m:cnt' : { + 'rn' : f'{cntRN}2' + }} + cls.ae, rsc = CREATE(aeURL, cls.originator, T.CNT, dct) # Extra CNT. 
Acts as a non-location enabled resource + assert rsc == RC.CREATED, 'cannot create CNT' + + testCaseEnd('Setup TestLocation') + + + @classmethod + @unittest.skipIf(noCSE, 'No CSEBase') + def tearDownClass(cls) -> None: + # if not isTearDownEnabled(): + # stopNotificationServer() + # return + testCaseStart('TearDown TestLocation') + DELETE(aeURL, ORIGINATOR) # Just delete the AE and everything below it. Ignore whether it exists or not + testCaseEnd('TearDown TestLocation') + + + def setUp(self) -> None: + testCaseStart(self._testMethodName) + + + def tearDown(self) -> None: + testCaseEnd(self._testMethodName) + + ######################################################################### + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createContainerWrongLocFail(self) -> None: + """ CREATE with invalid location -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': 'wrong', + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + # + # Point + # + + def test_createContainerLocWrongAttributesFail(self) -> None: + """ CREATE with location & wrong attributes -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[ 1.0, 2.0 ]', + 'wrong': 'wrong' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocPointIntCoordinatesFail(self) -> None: + """ CREATE with location type Point & and integer values -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[ 1, 2 ]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocPointWrongCountFail(self) -> None: + """ CREATE with location type Point & multiple coordinates -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[[ 1.0, 2.0 ], [ 3.0, 4.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocPoint(self) -> None: + """ CREATE with location type Point """ + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[ 1.0, 2.0 ]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + self.assertEqual(findXPath(r, 'm2m:cnt/loc/typ'), 1, r) + self.assertEqual(findXPath(r, 'm2m:cnt/loc/crd'), '[ 1.0, 2.0 ]', r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + # + # LineString + # + + def test_createContainerLocLineStringWrongCountFail(self) -> None: + """ CREATE with location type LineString & 1 coordinate -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 2, + 'crd': '[[ 1.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocLineString(self) -> None: + """ CREATE with location type LineString & 2 coordinates""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 2, + 'crd': '[[ 1.0, 2.0 ], [ 3.0, 4.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + self.assertEqual(findXPath(r, 'm2m:cnt/loc/typ'), 2, r) + self.assertEqual(findXPath(r, 'm2m:cnt/loc/crd'), '[[ 1.0, 2.0 ], [ 3.0, 4.0 ]]', r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + # + # Polygon + # + + def test_createContainerLocPolygonWrongCountFail(self) -> None: + """ 
CREATE with location type Polygon & 1 coordinate -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 1.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocPolygonWrongFirstLastCoordinateFail(self) -> None: + """ CREATE with location type Polygon & not matching first and last coordinate -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 1.0, 2.0 ], [ 3.0, 4.0 ], [ 5.0, 6.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocPolygon(self) -> None: + """ CREATE with location type Polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 1.0, 2.0 ], [ 3.0, 4.0 ], [ 5.0, 6.0 ], [ 1.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + self.assertEqual(findXPath(r, 'm2m:cnt/loc/typ'), 3, r) + self.assertEqual(findXPath(r, 'm2m:cnt/loc/crd'), '[[ 1.0, 2.0 ], [ 3.0, 4.0 ], [ 5.0, 6.0 ], [ 1.0, 2.0 ]]', r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + # + # Multipoint + # + + def test_createContainerLocMultiPointWrongFail(self) -> None: + """ CREATE with location type MultiPoint & wrong coordinate -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 4, + 'crd': '[1.0, 2.0 ]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocMultiPointWrongCountFail(self) -> None: + """ CREATE with location type MultiPoint & wrong count -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 4, + 'crd': '[ [ [1.0, 2.0 ], [ 3.0, 4.0 ] ] ]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocMultiPoint(self) -> None: + """ CREATE with location type MultiPoint""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 4, + 'crd': '[[ 1.0, 2.0 ], [ 3.0, 4.0 ], [ 5.0, 6.0 ], [ 1.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + self.assertEqual(findXPath(r, 'm2m:cnt/loc/typ'), 4, r) + self.assertEqual(findXPath(r, 'm2m:cnt/loc/crd'), '[[ 1.0, 2.0 ], [ 3.0, 4.0 ], [ 5.0, 6.0 ], [ 1.0, 2.0 ]]', r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + + # + # MultiLineString + # + + def test_createContainerLocMultiLineStringWrongFail(self) -> None: + """ CREATE with location type MultiLineString & wrong coordinate -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[1.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocMultiLineString2WrongFail(self) -> None: + """ CREATE with location type MultiLineString & wrong coordinate -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[1.0, 2.0 ]]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocMultiLineString(self) -> None: + """ CREATE with location type MultiLineString""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[ 1.0, 2.0 ], [ 3.0, 4.0 ]], [[ 5.0, 6.0 ], [ 7.0, 8.0 ]]]' + }, + }} + r, 
rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + self.assertEqual(findXPath(r, 'm2m:cnt/loc/typ'), 5, r) + self.assertEqual(findXPath(r, 'm2m:cnt/loc/crd'), '[[[ 1.0, 2.0 ], [ 3.0, 4.0 ]], [[ 5.0, 6.0 ], [ 7.0, 8.0 ]]]', r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + # + # MultiPolygon + # + + def test_createContainerLocMultiPolygonWrongFail(self) -> None: + """ CREATE with location type MultiPolygon & wrong coordinate -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 6, + 'crd': '[[1.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocMultiPolygonWrongFirstLastCoordinateFail(self) -> None: + """ CREATE with location type MultiPolygon & not matching first and last coordinate -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 6, + 'crd': '[[[ 1.0, 2.0 ], [ 3.0, 4.0 ], [ 5.0, 6.0 ]]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_createContainerLocMultiPolygon(self) -> None: + """ CREATE with location type MultiPolygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 6, + 'crd': '[[[ 1.0, 2.0 ], [ 3.0, 4.0 ], [ 5.0, 6.0 ], [ 1.0, 2.0 ]]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + self.assertEqual(findXPath(r, 'm2m:cnt/loc/typ'), 6, r) + self.assertEqual(findXPath(r, 'm2m:cnt/loc/crd'), '[[[ 1.0, 2.0 ], [ 3.0, 4.0 ], [ 5.0, 6.0 ], [ 1.0, 2.0 ]]]', r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + # + # geo-query + # + + def test_geoQueryGmtyOnlyFail(self) -> None: + """ RETRIEVE with rcn=4, gmty only -> Fail""" + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1', self.originator) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_geoQueryGeomOnlyFail(self) -> None: + """ RETRIEVE with rcn=4, geom only -> Fail""" + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&geom=[1.0,2.0]', self.originator) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_geoQueryGsfOnlyFail(self) -> None: + """ RETRIEVE with rcn=4, gsf only -> Fail""" + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gsf=1', self.originator) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + def test_geoQueryGeomWrongFail(self) -> None: + """ RETRIEVE with rcn=4, geometry wrong format -> Fail""" + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=1&geom=1.0', self.originator) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + # Point + + def test_geoQueryPointWithinPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry point is within polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=1&geom=[0.5,0.5]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointOutsidePolygon(self) -> None: + """ CREATE , RETRIEVE , geometry point outside polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, 
self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=1&geom=[2.0,2.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointWithinPoint(self) -> None: + """ CREATE , RETRIEVE , geometry point is within point""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[ 1.0, 1.0 ]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=1&geom=[1.0,1.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointContainsPoint(self) -> None: + """ CREATE , RETRIEVE , geometry point contains point""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[ 1.0, 1.0 ]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=2&geom=[1.0,1.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointContainsPolygonFail(self) -> None: + """ CREATE , RETRIEVE , geometry point contains polygon -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=2&geom=[0.5,0.5]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsPoint(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects point""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[ 1.0, 1.0 ]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[1.0,1.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsPointFail(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects point -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[ 1.0, 1.0 ]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[2.0,2.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, 
self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[0.0,0.5]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsPolygonFail(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects polygon -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=2&geom=[0.5,0.5]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + + # LineString + + def test_geoQueryLineStringWithinPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry line strinng is within polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=2&gsf=1&geom=[[0.5,0.5],[0.6,0.6]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryLineStringOutsidePolygon(self) -> None: + """ CREATE , RETRIEVE , geometry line string outside polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=2&gsf=1&geom=[[2.0,2.0],[3.0,3.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointWithinLineString1(self) -> None: + """ CREATE , RETRIEVE , geometry point is within LineString start point""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 2, + 'crd': '[[ 1.0, 1.0 ], [ 2.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=1&geom=[1.0,1.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointWithinLineString2(self) -> None: + """ CREATE , RETRIEVE , geometry point is within LineString middle""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 2, + 'crd': '[[ 1.0, 1.0 ], [ 2.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=1&geom=[1.5,1.5]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryLineStringContainsLineString(self) -> 
None: + """ CREATE , RETRIEVE , geometry line string contains line string""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 2, + 'crd': '[[ 1.0, 1.0 ], [ 2.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=2&gsf=2&geom=[[1.0,1.0],[2.0,2.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryLineStringContainsPolygonFail(self) -> None: + """ CREATE , RETRIEVE , geometry point contains polygon -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=2&gsf=2&geom=[[0.5,0.5],[0.6,0.6]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsLineString(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects line string""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 2, + 'crd': '[[ 1.0, 1.0 ], [ 2.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[1.5,1.5]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryLineStringIntersectsLineString(self) -> None: + """ CREATE , RETRIEVE , geometry line string intersects line string""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 2, + 'crd': '[[ 1.0, 1.0 ], [ 2.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=2&gsf=3&geom=[[2.0,1.0],[1.0,2.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryLineStringIntersectsLineStringFail(self) -> None: + """ CREATE , RETRIEVE , geometry line string intersects line string -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 2, + 'crd': '[[ 1.0, 1.0 ], [ 2.0, 2.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=2&gsf=3&geom=[[3.0,3.0],[4.0,4.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + # Polygon + + def test_geoQueryPolygonWithinPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry polygon is within polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=1&geom=[[0.5,0.5],[0.6,0.5],[0.6,0.6],[0.5,0.6],[0.5,0.5]]', 
self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPolygonOutsidePolygon(self) -> None: + """ CREATE , RETRIEVE , geometry polygon outside polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=1&geom=[[2.0,2.0],[3.0,2.0],[3.0,3.0],[2.0,3.0],[2.0,2.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPolygonPartlyWithinPolygonFail(self) -> None: + """ CREATE , RETRIEVE , geometry polygon partly is within polygon -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=1&geom=[[0.5,0.5],[1.5,0.5],[1.5,1.5],[0.5,1.5],[0.5,0.5]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPolygonContainsPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry polygon contains polygon """ + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=2&geom=[[0.0,0.0],[2.0,0.0],[2.0,2.0],[0.0,2.0],[0.0,0.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPolygonContainsPolygonFail(self) -> None: + """ CREATE , RETRIEVE , geometry polygon contains polygon -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 3.0, 0.0 ], [ 3.0, 3.0 ], [ 0.0, 3.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=2&geom=[[0.0,0.0],[2.0,0.0],[2.0,2.0],[0.0,2.0],[0.0,0.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPolygonIntersectsPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry polygon intersects polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=3&geom=[[0.5,0.5],[2.0,0.5],[2.0,2.0],[0.5,2.0],[0.5,0.5]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, 
self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPolygonIntersectsPolygonFail(self) -> None: + """ CREATE , RETRIEVE , geometry polygon intersects polygon -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=3&geom=[[1.5,1.5],[2.0,1.5],[2.0,2.0],[1.5,2.0],[1.5,1.5]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + + # MultiPoint + + def test_geoQueryMultiPointWithinPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry multi point is within polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=4&gsf=1&geom=[[0.5,0.5],[0.6,0.6]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiPointOutsidePolygon(self) -> None: + """ CREATE , RETRIEVE , geometry multi point outside polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=4&gsf=1&geom=[[2.0,2.0],[3.0,3.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiPointOutsidePolygonWrongGmtyFail(self) -> None: + """ CREATE , RETRIEVE , geometry type invalid for geometry -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # request with invalid geometry + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=1&geom=[[2.0,2.0],[3.0,3.0]]', self.originator) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiPointContainsPoint(self) -> None: + """ CREATE , RETRIEVE , geometry multi point contains Point""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[0.5, 0.5]', + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=4&gsf=2&geom=[[0.5,0.5],[0.6,0.6]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiPointContainsPointFail(self) -> None: + """ CREATE , RETRIEVE , geometry multi point contains Point -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[0.5, 0.5]', + }, + }} + r, rsc = 
CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=4&gsf=2&geom=[[0.4,0.4],[0.6,0.6]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsMultiPoint(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects multi point""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 4, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[0.0,0.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsMultiPointFail(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects multi point -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 4, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[2.0,2.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiPointIntersectsMultiPoint(self) -> None: + """ CREATE , RETRIEVE , geometry multi point intersects multi point""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 4, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=4&gsf=3&geom=[[0.0,0.0],[2.0,2.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiPointIntersectsMultiPointFail(self) -> None: + """ CREATE , RETRIEVE , geometry multi point intersects multi point -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 4, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=4&gsf=3&geom=[[3.0,3.0],[2.0,2.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + + # MultiLinestring + + def test_geoQueryMultiLinestringWithinPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry multi line string within polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=5&gsf=1&geom=[[[0.5,0.5],[0.6,0.6]],[[0.7,0.7],[0.8,0.8]]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def 
test_geoQueryMultiLinestringOutsidePolygon(self) -> None: + """ CREATE , RETRIEVE , geometry multi line string outside polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=5&gsf=1&geom=[[[1.5,1.5],[1.6,1.6]],[[1.7,1.7],[1.8,1.8]]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiLineContainsPoint(self) -> None: + """ CREATE , RETRIEVE , geometry multi line contains Point""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[1.55, 1.55]', + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=5&gsf=2&geom=[[[1.5,1.5],[1.6,1.6]],[[1.7,1.7],[1.8,1.8]]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiLineContainsPointFail(self) -> None: + """ CREATE , RETRIEVE , geometry multi line contains Point -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[0.5, 0.5]', + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=5&gsf=2&geom=[[[1.5,1.5],[1.6,1.6]],[[1.7,1.7],[1.8,1.8]]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsMultiLine(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects multi line""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[1.0,1.0],[2.0,2.0]],[[3.0,3.0],[4.0,4.0]]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[1.5,1.5]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsMultiLineFail(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects multi line -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[1.0,1.0],[2.0,2.0]],[[3.0,3.0],[4.0,4.0]]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[5.0,5.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryLineStringIntersectsMultiLine(self) -> None: + """ CREATE , RETRIEVE , geometry line string intersects multi line""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[1.0,1.0],[2.0,2.0]],[[3.0,3.0],[4.0,4.0]]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = 
RETRIEVE(f'{aeURL}?rcn=4&gmty=2&gsf=3&geom=[[2.0,1.0],[1.0,2.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryLineStringIntersectsMultiLineFail(self) -> None: + """ CREATE , RETRIEVE , geometry line string intersects multi line -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[1.0,1.0],[2.0,2.0]],[[3.0,3.0],[4.0,4.0]]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=2&gsf=3&geom=[[5.0,5.0],[6.0,6.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + + # MultiPolygon + + def test_geoQueryMultiPolygonWithinPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry multi polygon is within polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=6&gsf=1&geom=[[[0.5,0.5],[0.6,0.5],[0.6,0.6],[0.5,0.6],[0.5,0.5]],[[0.7,0.7],[0.8,0.7],[0.8,0.8],[0.7,0.8],[0.7,0.7]]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiPolygonOutsidePolygon(self) -> None: + """ CREATE , RETRIEVE , geometry multi polygon outside polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 3, + 'crd': '[[ 0.0, 0.0 ], [ 1.0, 0.0 ], [ 1.0, 1.0 ], [ 0.0, 1.0 ], [ 0.0, 0.0 ]]' + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=6&gsf=1&geom=[[[1.5,1.5],[1.6,1.5],[1.6,1.6],[1.5,1.6],[1.5,1.5]],[[1.7,1.7],[1.8,1.7],[1.8,1.8],[1.7,1.8],[1.7,1.7]]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiPolygonContainsPoint(self) -> None: + """ CREATE , RETRIEVE , geometry multi polygon contains Point""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[1.55, 1.55]', + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=6&gsf=2&geom=[[[1.5,1.5],[1.6,1.5],[1.6,1.6],[1.5,1.6],[1.5,1.5]],[[1.7,1.7],[1.8,1.7],[1.8,1.8],[1.7,1.8],[1.7,1.7]]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryMultiPolygonContainsPointFail(self) -> None: + """ CREATE , RETRIEVE , geometry multi line contains Point -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 1, + 'crd': '[0.5, 0.5]', + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = 
RETRIEVE(f'{aeURL}?rcn=4&gmty=6&gsf=2&geom=[[[1.5,1.5],[1.6,1.5],[1.6,1.6],[1.5,1.6],[1.5,1.5]],[[1.7,1.7],[1.8,1.7],[1.8,1.8],[1.7,1.8],[1.7,1.7]]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsMultiPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects multi polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[1.5,1.5],[1.6,1.5],[1.6,1.6],[1.5,1.6],[1.5,1.5]],[[1.7,1.7],[1.8,1.7],[1.8,1.8],[1.7,1.8],[1.7,1.7]]]', + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[1.55,1.5]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPointIntersectsMultiPolygonFail(self) -> None: + """ CREATE , RETRIEVE , geometry point intersects multi polygon -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[1.5,1.5],[1.6,1.5],[1.6,1.6],[1.5,1.6],[1.5,1.5]],[[1.7,1.7],[1.8,1.7],[1.8,1.8],[1.7,1.8],[1.7,1.7]]]', + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=1&gsf=3&geom=[2.0,2.0]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPolygonIntersectsMultiPolygon(self) -> None: + """ CREATE , RETRIEVE , geometry polygon intersects multi polygon""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[1.5,1.5],[1.6,1.5],[1.6,1.6],[1.5,1.6],[1.5,1.5]],[[1.7,1.7],[1.8,1.7],[1.8,1.8],[1.7,1.8],[1.7,1.7]]]', + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=3&geom=[[0.0,0.0],[2.0,0.0],[2.0,2.0],[0.0,2.0],[0.0,0.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNotNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + def test_geoQueryPolygonIntersectsMultiPolygonFail(self) -> None: + """ CREATE , RETRIEVE , geometry polygon intersects multi polygon -> Fail""" + + dct = { 'm2m:cnt': { + 'rn': cntRN, + 'loc': { + 'typ': 5, + 'crd': '[[[1.5,1.5],[1.6,1.5],[1.6,1.6],[1.5,1.6],[1.5,1.5]],[[1.7,1.7],[1.8,1.7],[1.8,1.8],[1.7,1.8],[1.7,1.7]]]', + }, + }} + r, rsc = CREATE(aeURL, self.originator, T.CNT, dct) + self.assertEqual(rsc, RC.CREATED, r) + + r, rsc = RETRIEVE(f'{aeURL}?rcn=4&gmty=3&gsf=3&geom=[[2.0,2.0],[4.0,2.0],[4.0,4.0],[2.0,4.0],[2.0,2.0]]', self.originator) + self.assertEqual(rsc, RC.OK, r) + self.assertIsNone(findXPath(r, 'm2m:ae/m2m:cnt'), r) + + r, rsc = DELETE(cntURL, self.originator) + self.assertEqual(rsc, RC.DELETED, r) + + ######################################################################### + + +def run(testFailFast:bool) -> Tuple[int, int, int, float]: + suite = unittest.TestSuite() + + # basic tests + addTest(suite, TestLocation('test_createContainerWrongLocFail')) + + # Point + addTest(suite, TestLocation('test_createContainerLocWrongAttributesFail')) + addTest(suite, 
TestLocation('test_createContainerLocPointIntCoordinatesFail')) + addTest(suite, TestLocation('test_createContainerLocPointWrongCountFail')) + addTest(suite, TestLocation('test_createContainerLocPoint')) + + # LineString + addTest(suite, TestLocation('test_createContainerLocLineStringWrongCountFail')) + addTest(suite, TestLocation('test_createContainerLocLineString')) + + # Polygon + addTest(suite, TestLocation('test_createContainerLocPolygonWrongCountFail')) + addTest(suite, TestLocation('test_createContainerLocPolygonWrongFirstLastCoordinateFail')) + addTest(suite, TestLocation('test_createContainerLocPolygon')) + + # MultiPoint + addTest(suite, TestLocation('test_createContainerLocMultiPointWrongFail')) + addTest(suite, TestLocation('test_createContainerLocMultiPointWrongCountFail')) + addTest(suite, TestLocation('test_createContainerLocMultiPoint')) + + # MultiLineString + addTest(suite, TestLocation('test_createContainerLocMultiLineStringWrongFail')) + addTest(suite, TestLocation('test_createContainerLocMultiLineString2WrongFail')) + addTest(suite, TestLocation('test_createContainerLocMultiLineString')) + + # MultiPolygon + addTest(suite, TestLocation('test_createContainerLocMultiPolygonWrongFail')) + addTest(suite, TestLocation('test_createContainerLocMultiPolygonWrongFirstLastCoordinateFail')) + addTest(suite, TestLocation('test_createContainerLocMultiPolygon')) + + # geo-query + addTest(suite, TestLocation('test_geoQueryGmtyOnlyFail')) + addTest(suite, TestLocation('test_geoQueryGeomOnlyFail')) + addTest(suite, TestLocation('test_geoQueryGsfOnlyFail')) + addTest(suite, TestLocation('test_geoQueryGeomWrongFail')) + + addTest(suite, TestLocation('test_geoQueryPointWithinPolygon')) + addTest(suite, TestLocation('test_geoQueryPointOutsidePolygon')) + addTest(suite, TestLocation('test_geoQueryPointWithinPoint')) + addTest(suite, TestLocation('test_geoQueryPointContainsPoint')) + addTest(suite, TestLocation('test_geoQueryPointContainsPolygonFail')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsPoint')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsPointFail')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsPolygon')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsPolygonFail')) + + addTest(suite, TestLocation('test_geoQueryLineStringWithinPolygon')) + addTest(suite, TestLocation('test_geoQueryLineStringOutsidePolygon')) + addTest(suite, TestLocation('test_geoQueryPointWithinLineString1')) + addTest(suite, TestLocation('test_geoQueryPointWithinLineString2')) + addTest(suite, TestLocation('test_geoQueryLineStringContainsLineString')) + addTest(suite, TestLocation('test_geoQueryLineStringContainsPolygonFail')) + addTest(suite, TestLocation('test_geoQueryLineStringIntersectsLineString')) + addTest(suite, TestLocation('test_geoQueryLineStringIntersectsLineStringFail')) + + addTest(suite, TestLocation('test_geoQueryPolygonWithinPolygon')) + addTest(suite, TestLocation('test_geoQueryPolygonOutsidePolygon')) + addTest(suite, TestLocation('test_geoQueryPolygonPartlyWithinPolygonFail')) + addTest(suite, TestLocation('test_geoQueryPointContainsPolygonFail')) + addTest(suite, TestLocation('test_geoQueryPolygonContainsPolygon')) + addTest(suite, TestLocation('test_geoQueryPolygonContainsPolygonFail')) + addTest(suite, TestLocation('test_geoQueryPolygonIntersectsPolygon')) + addTest(suite, TestLocation('test_geoQueryPolygonIntersectsPolygonFail')) + + addTest(suite, TestLocation('test_geoQueryMultiPointWithinPolygon')) + addTest(suite, 
TestLocation('test_geoQueryMultiPointOutsidePolygon')) + addTest(suite, TestLocation('test_geoQueryMultiPointOutsidePolygonWrongGmtyFail')) + addTest(suite, TestLocation('test_geoQueryMultiPointContainsPoint')) + addTest(suite, TestLocation('test_geoQueryMultiPointContainsPointFail')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsMultiPoint')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsMultiPointFail')) + addTest(suite, TestLocation('test_geoQueryMultiPointIntersectsMultiPoint')) + addTest(suite, TestLocation('test_geoQueryMultiPointIntersectsMultiPointFail')) + + addTest(suite, TestLocation('test_geoQueryMultiLinestringWithinPolygon')) + addTest(suite, TestLocation('test_geoQueryMultiLinestringOutsidePolygon')) + addTest(suite, TestLocation('test_geoQueryMultiLineContainsPoint')) + addTest(suite, TestLocation('test_geoQueryMultiLineContainsPointFail')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsMultiLine')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsMultiLineFail')) + addTest(suite, TestLocation('test_geoQueryLineStringIntersectsMultiLine')) + addTest(suite, TestLocation('test_geoQueryLineStringIntersectsMultiLineFail')) + + addTest(suite, TestLocation('test_geoQueryMultiPolygonWithinPolygon')) + addTest(suite, TestLocation('test_geoQueryMultiPolygonOutsidePolygon')) + addTest(suite, TestLocation('test_geoQueryMultiPolygonContainsPoint')) + addTest(suite, TestLocation('test_geoQueryMultiPolygonContainsPointFail')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsMultiPolygon')) + addTest(suite, TestLocation('test_geoQueryPointIntersectsMultiPolygonFail')) + addTest(suite, TestLocation('test_geoQueryPolygonIntersectsMultiPolygon')) + addTest(suite, TestLocation('test_geoQueryPolygonIntersectsMultiPolygonFail')) + + result = unittest.TextTestRunner(verbosity = testVerbosity, failfast = testFailFast).run(suite) + return result.testsRun, len(result.errors + result.failures), len(result.skipped), getSleepTimeCount() + + +if __name__ == '__main__': + r, errors, s, t = run(True) + sys.exit(errors) \ No newline at end of file diff --git a/tests/testMgmtObj.py b/tests/testMgmtObj.py index a0fb5fb9..5061170f 100644 --- a/tests/testMgmtObj.py +++ b/tests/testMgmtObj.py @@ -1134,7 +1134,7 @@ def test_updateDATCrpscInvalidSchedule1Fail(self) -> None: def test_updateDATCrpscInvalidSchedule2Fail(self) -> None: """ UPDATE [dataCollection] rpsc with an invalid schedule -> FAIL""" dct = { 'dcfg:datc' : { - 'rpsc': [ { 'sce': '10 * * * *' } ], # invalid format, must be 7 + 'rpsc': [ { 'sce': [ '10 * * * *' ] } ], # invalid format, must be 7 }} r, rsc = UPDATE(self.datcURL, ORIGINATOR, dct) self.assertEqual(rsc, RC.BAD_REQUEST, r) @@ -1144,7 +1144,7 @@ def test_updateDATCrpscInvalidSchedule2Fail(self) -> None: def test_updateDATCrpscValidSchedule(self) -> None: """ UPDATE [dataCollection] rpsc with a valid schedule""" dct = { 'dcfg:datc' : { - 'rpsc': [ { 'sce': '10 * * * * * *' } ], + 'rpsc': [ { 'sce': [ '10 * * * * * *' ] } ], }} r, rsc = UPDATE(self.datcURL, ORIGINATOR, dct) self.assertEqual(rsc, RC.UPDATED, r) @@ -1187,7 +1187,7 @@ def test_updateDATCmescInvalidSchedule1Fail(self) -> None: def test_updateDATCmescInvalidSchedule2Fail(self) -> None: """ UPDATE [dataCollection] mesc with an invalid schedule -> FAIL""" dct = { 'dcfg:datc' : { - 'mesc': [ { 'sce': '10 * * * *' } ], # invalid format, must be 7 + 'mesc': [ { 'sce': [ '10 * * * *' ] } ], # invalid format, must be 7 }} r, rsc = UPDATE(self.datcURL, ORIGINATOR, dct) 
self.assertEqual(rsc, RC.BAD_REQUEST, r) @@ -1197,7 +1197,7 @@ def test_updateDATCmescInvalidSchedule2Fail(self) -> None: def test_updateDATCmescValidSchedule(self) -> None: """ UPDATE [dataCollection] mesc with a valid schedule""" dct = { 'dcfg:datc' : { - 'mesc': [ { 'sce': '10 * * * * * *' } ], + 'mesc': [ { 'sce': [ '10 * * * * * *' ] } ], }} r, rsc = UPDATE(self.datcURL, ORIGINATOR, dct) self.assertEqual(rsc, RC.UPDATED, r) @@ -1259,7 +1259,7 @@ def test_attributesDATC(self) -> None: self.assertEqual(len(rpsc), 1, r) self.assertIsInstance((rpsce := rpsc[0]), dict, r) self.assertIsNotNone((sce := rpsce.get('sce')), r) - self.assertEqual(sce, '10 * * * * * *', r) + self.assertEqual(sce, [ '10 * * * * * *' ], r) @unittest.skipIf(noCSE, 'No CSEBase') diff --git a/tests/testREQ.py b/tests/testREQ.py index 45462f17..1b9b990c 100644 --- a/tests/testREQ.py +++ b/tests/testREQ.py @@ -140,13 +140,19 @@ def test_retrieveCSENBSynchValidateREQ(self) -> None: self.assertEqual(rsc, RC.ACCEPTED_NON_BLOCKING_REQUEST_SYNC, r) self.assertIsNotNone(findXPath(r, 'm2m:uri')) requestURI = findXPath(r, 'm2m:uri') + rqi = lastRequestID() # Immediately retrieve r, rsc = RETRIEVE(f'{csiURL}/{requestURI}', TestREQ.originator) self.assertEqual(rsc, RC.OK, r) self.assertIsNotNone(findXPath(r, 'm2m:req/rs'), r) self.assertEqual(findXPath(r, 'm2m:req/rs'), RequestStatus.PENDING, r) - self.assertIsNone(findXPath(r, 'm2m:req/ors'), r) + self.assertIsNotNone(findXPath(r, 'm2m:req/ors'), r) + self.assertIsNotNone(findXPath(r, 'm2m:req/ors/rsc')) + self.assertEqual(findXPath(r, 'm2m:req/ors/rsc'), RC.ACCEPTED) + self.assertIsNotNone(findXPath(r, 'm2m:req/ors/rqi')) + self.assertEqual(findXPath(r, 'm2m:req/ors/rqi'), rqi) # test the request ID from the original request + # get and check after a delay to give the operation time to run testSleep(requestCheckDelay * 2) diff --git a/tests/testRemote_Annc.py b/tests/testRemote_Annc.py index 317d2119..ee5415b5 100644 --- a/tests/testRemote_Annc.py +++ b/tests/testRemote_Annc.py @@ -149,6 +149,63 @@ def test_deleteAnnounceAE(self) -> None: # create an announced AE, including announced attribute # + # + # Perhaps the following three (fail) tests should be moved to somewhere else + # But using the "aa" attribute seems to be the easiest way to test the + # "ncname" validation. 
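For context, the three tests that follow exercise that check with an illegal character, an embedded space, and a leading digit. As a minimal sketch only, assuming a simplified, ASCII-only approximation of the XML NCName rule (the helper below is illustrative and not ACME's actual validator), the idea is:

```python
import re

# Simplified, ASCII-only approximation of an NCName: a leading letter or '_',
# followed by letters, digits, '.', '-' or '_'. The real NCName production
# additionally allows many non-ASCII characters.
_NCNAME_RE = re.compile(r'^[A-Za-z_][A-Za-z0-9._-]*$')

def isNCName(name: str) -> bool:
	""" Illustrative check only: 'lbl' passes; 'lb+l', 'lb l' and '1lbl' fail. """
	return _NCNAME_RE.match(name) is not None
```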
+ # + + # Create an AE with AT and AA, but wrong char in attribute + @unittest.skipIf(noRemote or noCSE, 'No CSEBase or remote CSEBase') + def test_createAnnounceAEwithATwithWrongAA1Fail(self) -> None: + """ Create and announce (AT, AA) with wrong char in attribute -> Fail """ + dct = { 'm2m:ae' : { + 'rn': aeRN, + 'api': APPID, + 'rr': False, + 'srv': [ RELEASEVERSION ], + 'lbl': [ 'aLabel'], + 'at': [ REMOTECSEID ], + 'aa': [ 'lbl', 'lb+l'] # wrong attribute + }} + r, rsc = CREATE(cseURL, 'C', T.AE, dct) + self.assertEqual(rsc, RC.BAD_REQUEST) + + + # Create an AE with AT and AA, but space in attribute + @unittest.skipIf(noRemote or noCSE, 'No CSEBase or remote CSEBase') + def test_createAnnounceAEwithATwithWrongAA2Fail(self) -> None: + """ Create and announce (AT, AA) with space in attribute -> Fail """ + dct = { 'm2m:ae' : { + 'rn': aeRN, + 'api': APPID, + 'rr': False, + 'srv': [ RELEASEVERSION ], + 'lbl': [ 'aLabel'], + 'at': [ REMOTECSEID ], + 'aa': [ 'lbl', 'lb l'] # wrong attribute + }} + r, rsc = CREATE(cseURL, 'C', T.AE, dct) + self.assertEqual(rsc, RC.BAD_REQUEST) + + + # Create an AE with AT and AA, but leading digit in attribute + @unittest.skipIf(noRemote or noCSE, 'No CSEBase or remote CSEBase') + def test_createAnnounceAEwithATwithWrongAA3Fail(self) -> None: + """ Create and announce (AT, AA) with leading digit in attribute -> Fail """ + dct = { 'm2m:ae' : { + 'rn': aeRN, + 'api': APPID, + 'rr': False, + 'srv': [ RELEASEVERSION ], + 'lbl': [ 'aLabel'], + 'at': [ REMOTECSEID ], + 'aa': [ 'lbl', '1lbl'] # wrong attribute + }} + r, rsc = CREATE(cseURL, 'C', T.AE, dct) + self.assertEqual(rsc, RC.BAD_REQUEST) + + # Create an AE with AT and AA @unittest.skipIf(noRemote or noCSE, 'No CSEBase or remote CSEBase') def test_createAnnounceAEwithATwithAA(self) -> None: @@ -789,6 +846,9 @@ def run(testFailFast:bool) -> Tuple[int, int, int, float]: addTest(suite, TestRemote_Annc('test_deleteAnnounceAE')) # create an announced AE, including announced attribute + addTest(suite, TestRemote_Annc('test_createAnnounceAEwithATwithWrongAA1Fail')) + addTest(suite, TestRemote_Annc('test_createAnnounceAEwithATwithWrongAA2Fail')) + addTest(suite, TestRemote_Annc('test_createAnnounceAEwithATwithWrongAA3Fail')) addTest(suite, TestRemote_Annc('test_createAnnounceAEwithATwithAA')) addTest(suite, TestRemote_Annc('test_retrieveAnnouncedAEwithATwithAA')) addTest(suite, TestRemote_Annc('test_deleteAnnounceAE')) diff --git a/tests/testRequests.py b/tests/testRequests.py index a1045904..e1a98a40 100644 --- a/tests/testRequests.py +++ b/tests/testRequests.py @@ -11,7 +11,7 @@ if '..' not in sys.path: sys.path.append('..') from typing import Tuple -from acme.etc.Types import ResourceTypes as T, ResponseStatusCode as RC +from acme.etc.Types import ResourceTypes as T, ResponseStatusCode as RC, ResponseType from init import * # TODO transfer requests @@ -34,6 +34,7 @@ def setUpClass(cls) -> None: cls.ae, rsc = CREATE(cseURL, 'C', T.AE, dct) # AE to work under assert rsc == RC.CREATED, 'cannot create parent AE' cls.originator = findXPath(cls.ae, 'm2m:ae/aei') + enableShortResourceExpirations() testCaseEnd('Setup TestRequests') @@ -46,6 +47,7 @@ def tearDownClass(cls) -> None: testCaseStart('TearDown TestRequests') DELETE(aeURL, ORIGINATOR) # Just delete the AE and everything below it. 
Ignore whether it exists or not stopNotificationServer() + disableShortResourceExpirations() testCaseEnd('TearDown TestRequests') @@ -102,14 +104,14 @@ def test_OETfutureSeconds(self) -> None: @unittest.skipIf(noCSE, 'No CSEBase') - def test_RETnow(self) -> None: + def test_RETnowFail(self) -> None: """ RETRIEVE with RQET absolute now -> FAIL """ r, rsc = RETRIEVE(aeURL, TestRequests.originator, headers={ C.hfRET : DateUtils.getResourceDate()}) self.assertEqual(rsc, RC.REQUEST_TIMEOUT, r) @unittest.skipIf(noCSE, 'No CSEBase') - def test_RETpast(self) -> None: + def test_RETpastFail(self) -> None: """ RETRIEVE with RQET absolute in the past -> FAIL """ r, rsc = RETRIEVE(aeURL, TestRequests.originator, headers={ C.hfRET : DateUtils.getResourceDate(-10)}) self.assertEqual(rsc, RC.REQUEST_TIMEOUT, r) @@ -123,8 +125,8 @@ def test_RETfuture(self) -> None: @unittest.skipIf(noCSE, 'No CSEBase') - def test_RETpastSeconds(self) -> None: - """ RETRIEVE with RQET seconds in the past """ + def test_RETpastSecondsFail(self) -> None: + """ RETRIEVE with RQET seconds in the past -> Fail""" r, rsc = RETRIEVE(aeURL, TestRequests.originator, headers={ C.hfRET : f'{-expirationCheckDelay*1000}'}) self.assertEqual(rsc, RC.REQUEST_TIMEOUT, r) @@ -144,12 +146,47 @@ def test_OETRETfutureSeconds(self) -> None: @unittest.skipIf(noCSE, 'No CSEBase') - def test_OETRETfutureSecondsWrong(self) -> None: - """ RETRIEVE with OET > RQET seconds in the future """ + def test_OETRETfutureSecondsWrongFail(self) -> None: + """ RETRIEVE with OET > RQET seconds in the future -> Fail""" r, rsc = RETRIEVE(aeURL, TestRequests.originator, headers={ C.hfRET : f'{expirationCheckDelay*1000/2}', C.hfOET : f'{expirationCheckDelay*1000}'}) self.assertEqual(rsc, RC.REQUEST_TIMEOUT, r) + @unittest.skipIf(noCSE, 'No CSEBase') + def test_RSETsmallerThanRETFail(self) -> None: + """ RETRIEVE with RET < RSET - Fail """ + r, rsc = RETRIEVE(aeURL, TestRequests.originator, headers={ C.hfRET : f'{expirationCheckDelay*2000}', C.hfRST : f'{expirationCheckDelay*1000}'}) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_RSETpastFail(self) -> None: + """ RETRIEVE with RSET < now - Fail """ + r, rsc = RETRIEVE(aeURL, TestRequests.originator, headers={ C.hfRST : f'-{expirationCheckDelay*2000}'}) + self.assertEqual(rsc, RC.REQUEST_TIMEOUT, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_RSETNonBlockingSynchFail(self) -> None: + """ Retrieve non-blocking synchronous with short RSET -> Fail""" + + _rset = expirationCheckDelay * 1000 + r, rsc = RETRIEVE(f'{aeURL}?rt={int(ResponseType.nonBlockingRequestSynch)}', + TestRequests.originator, + headers={ C.hfRST : f'{_rset}'}) + headers = lastHeaders() + self.assertEqual(rsc, RC.ACCEPTED_NON_BLOCKING_REQUEST_SYNC, r) + self.assertIsNotNone(findXPath(r, 'm2m:uri')) + self.assertIn(C.hfRST, headers) + self.assertEqual(headers[C.hfRST], f'{_rset}') + requestURI = findXPath(r, 'm2m:uri') + + # get and check resource + testSleep(requestExpirationDelay * 2) + r, rsc = RETRIEVE(f'{csiURL}/{requestURI}', TestRequests.originator) + self.assertEqual(rsc, RC.NOT_FOUND, r) + + def run(testFailFast:bool) -> Tuple[int, int, int, float]: suite = unittest.TestSuite() @@ -158,13 +195,20 @@ def run(testFailFast:bool) -> Tuple[int, int, int, float]: addTest(suite, TestRequests('test_OETfuture')) addTest(suite, TestRequests('test_OETfuturePeriod')) addTest(suite, TestRequests('test_OETfutureSeconds')) - addTest(suite, TestRequests('test_RETnow')) - addTest(suite, 
TestRequests('test_RETpast')) + + addTest(suite, TestRequests('test_RETnowFail')) + addTest(suite, TestRequests('test_RETpastFail')) addTest(suite, TestRequests('test_RETfuture')) - addTest(suite, TestRequests('test_RETpastSeconds')) + addTest(suite, TestRequests('test_RETpastSecondsFail')) addTest(suite, TestRequests('test_RETfutureSeconds')) + addTest(suite, TestRequests('test_OETRETfutureSeconds')) - addTest(suite, TestRequests('test_OETRETfutureSecondsWrong')) + addTest(suite, TestRequests('test_OETRETfutureSecondsWrongFail')) + + addTest(suite, TestRequests('test_RSETsmallerThanRETFail')) + addTest(suite, TestRequests('test_RSETpastFail')) + + addTest(suite, TestRequests('test_RSETNonBlockingSynchFail')) result = unittest.TextTestRunner(verbosity=testVerbosity, failfast=testFailFast).run(suite) diff --git a/tests/testSCH.py b/tests/testSCH.py new file mode 100644 index 00000000..d890bbc4 --- /dev/null +++ b/tests/testSCH.py @@ -0,0 +1,639 @@ + # +# testSCH.py +# +# (c) 2023 by Andreas Kraft +# License: BSD 3-Clause License. See the LICENSE file for further details. +# +# Unit tests for Schedule functionality +# + +import unittest, sys +if '..' not in sys.path: + sys.path.append('..') +from typing import Tuple +from acme.etc.Types import ResourceTypes as T, ResponseStatusCode as RC, TimeWindowType +from acme.etc.Types import NotificationEventType, NotificationEventType as NET +from init import * +from datetime import timedelta + +nodeID = 'urn:sn:1234' + +def createScheduleString(range:int, delay:int = 0) -> str: + """ Create schedule string for range seconds """ + dts = datetime.now(tz = timezone.utc) + timedelta(seconds = delay) + dte = dts + timedelta(seconds = range) + return f'{dts.second}-{dte.second} {dts.minute}-{dte.minute} {dts.hour}-{dte.hour} * * * *' + + +class TestSCH(unittest.TestCase): + + ae = None + aeRI = None + ae2 = None + nod = None + nodRI = None + crs = None + crsRI = None + + + originator = None + + @classmethod + @unittest.skipIf(noCSE, 'No CSEBase') + def setUpClass(cls) -> None: + testCaseStart('Setup TestSCH') + + # Start notification server + startNotificationServer() + + + dct = { 'm2m:ae' : { + 'rn' : aeRN, + 'api' : APPID, + 'rr' : True, + 'srv' : [ RELEASEVERSION ] + }} + cls.ae, rsc = CREATE(cseURL, 'C', T.AE, dct) # AE to work under + assert rsc == RC.CREATED, 'cannot create parent AE' + cls.originator = findXPath(cls.ae, 'm2m:ae/aei') + cls.aeRI = findXPath(cls.ae, 'm2m:ae/ri') + + + dct = { 'm2m:nod' : { + 'rn' : nodRN, + 'ni' : nodeID + }} + cls.nod, rsc = CREATE(cseURL, ORIGINATOR, T.NOD, dct) + assert rsc == RC.CREATED + cls.nodRI = findXPath(cls.nod, 'm2m:nod/ri') + + dct = { 'm2m:crs' : { + 'rn' : crsRN, + 'nu' : [ NOTIFICATIONSERVER ], + 'twt' : TimeWindowType.PERIODICWINDOW, + 'tws' : f'PT{crsTimeWindowSize}S', + 'rrat' : [ cls.nodRI ], + 'encs' : { + 'enc' : [ + { + 'net': [ NotificationEventType.createDirectChild ], + } + ] + } + + + }} + cls.nod, rsc = CREATE(cseURL, ORIGINATOR, T.CRS, dct) + assert rsc == RC.CREATED + cls.crsRI = findXPath(cls.nod, 'm2m:crs/ri') + + + testCaseEnd('Setup TestSCH') + + + @classmethod + @unittest.skipIf(noCSE, 'No CSEBase') + def tearDownClass(cls) -> None: + if not isTearDownEnabled(): + stopNotificationServer() + return + testCaseStart('TearDown TestSCH') + DELETE(aeURL, ORIGINATOR) # Just delete the AE and everything below it. Ignore whether it exists or not + DELETE(nodURL, ORIGINATOR) # Just delete the NOD and everything below it. 
Ignore whether it exists or not + DELETE(f'{cseURL}/{crsRN}', ORIGINATOR) + DELETE(f'{cseURL}/{schRN}', ORIGINATOR) + testCaseEnd('TearDown TestSCH') + + + def setUp(self) -> None: + testCaseStart(self._testMethodName) + + + def tearDown(self) -> None: + testCaseEnd(self._testMethodName) + + + ######################################################################### + +# TODO validate schedule element format ***** + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderCBwithNOCFail(self) -> None: + """ CREATE invalid with "nco" under CSEBase -> Fail""" + self.assertIsNotNone(TestSCH.ae) + dct = { 'm2m:sch' : { + 'rn' : schRN, + 'se': { 'sce': [ '* * * * * * *' ] }, + 'nco': True + }} + r, rsc = CREATE(cseURL, ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CONTENTS_UNACCEPTABLE, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderNODwithNOCUnsupportedFail(self) -> None: + """ CREATE with nco under NOD (unsupported) -> Fail""" + self.assertIsNotNone(TestSCH.ae) + dct = { 'm2m:sch' : { + 'rn' : schRN, + 'se': { 'sce': [ '* * * * * * *' ] }, + 'nco': True + }} + r, rsc = CREATE(nodURL, ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.NOT_IMPLEMENTED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderCBwithoutNCO(self) -> None: + """ CREATE without "nco" under CSEBase""" + self.assertIsNotNone(TestSCH.ae) + dct = { 'm2m:sch' : { + 'rn' : schRN, + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(cseURL, ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # DELETE again + r, rsc = DELETE(f'{cseURL}/{schRN}', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_updateSCHunderCBwithNCOFail(self) -> None: + """ UPDATE without "nco" under CSEBase -> Fail""" + self.assertIsNotNone(TestSCH.ae) + dct:JSON = { 'm2m:sch' : { + 'rn' : schRN, + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(cseURL, ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # UPDATE with nco + dct = { 'm2m:sch' : { + 'nco': True + }} + r, rsc = UPDATE(f'{cseURL}/{schRN}', ORIGINATOR, dct) + self.assertEqual(rsc, RC.CONTENTS_UNACCEPTABLE, r) + + # DELETE again + r, rsc = DELETE(f'{cseURL}/{schRN}', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_updateSCHunderNODwithNOCUnsupportedFail(self) -> None: + """ CREATE with nco under NOD (unsupported) -> Fail""" + self.assertIsNotNone(TestSCH.ae) + dct:JSON = { 'm2m:sch' : { + 'rn' : schRN, + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(nodURL, ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # UPDATE with nco + dct = { 'm2m:sch' : { + 'nco': True + }} + r, rsc = UPDATE(f'{nodURL}/{schRN}', ORIGINATOR, dct) + self.assertEqual(rsc, RC.NOT_IMPLEMENTED, r) + + # DELETE again + r, rsc = DELETE(f'{nodURL}/{schRN}', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + # + # Testing CREATE with different parent types + # + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderSUBwrongRn(self) -> None: + """ CREATE with wrong rn under -> Fail""" + # create + dct:JSON = { 'm2m:sub' : { + 'rn' : f'{subRN}', + 'enc': { + 'net': [ NotificationEventType.resourceUpdate ] + }, + 'nu': [ NOTIFICATIONSERVER ] + }} + r, rsc = CREATE(cseURL, ORIGINATOR, T.SUB, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # create with wrong rn + dct = { 'm2m:sch' : { + 'rn' : 'wrong', + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = 
CREATE(f'{cseURL}/{subRN}', ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + # DELETE SUB again + r, rsc = DELETE(f'{cseURL}/{subRN}', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderSUBemptyRn(self) -> None: + """ CREATE with empty rn under """ + # create + dct:JSON = { 'm2m:sub' : { + 'rn' : f'{subRN}', + 'enc': { + 'net': [ NotificationEventType.resourceUpdate ] + }, + 'nu': [ NOTIFICATIONSERVER ] + }} + r, rsc = CREATE(cseURL, ORIGINATOR, T.SUB, dct) + self.assertEqual(rsc, RC.CREATED, r) + + dct = { 'm2m:sch' : { + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(f'{cseURL}/{subRN}', ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # DELETE again + r, rsc = DELETE(f'{cseURL}/{subRN}', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderSUBcorrectRn(self) -> None: + """ CREATE with correct rn under """ + # create + dct:JSON = { 'm2m:sub' : { + 'rn' : f'{subRN}', + 'enc': { + 'net': [ NotificationEventType.resourceUpdate ] + }, + 'nu': [ NOTIFICATIONSERVER ] + }} + r, rsc = CREATE(cseURL, ORIGINATOR, T.SUB, dct) + self.assertEqual(rsc, RC.CREATED, r) + + dct = { 'm2m:sch' : { + 'rn' : 'notificationSchedule', + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(f'{cseURL}/{subRN}', ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # DELETE again + r, rsc = DELETE(f'{cseURL}/{subRN}', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderCRSwrongRn(self) -> None: + """ CREATE with wrong rn under -> Fail""" + dct:JSON = { 'm2m:sch' : { + 'rn' : 'wrong', + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(f'{cseURL}/{crsRN}', ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderCRSemptyRn(self) -> None: + """ CREATE with empty rn under """ + dct:JSON = { 'm2m:sch' : { + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(f'{cseURL}/{crsRN}', ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # DELETE again + r, rsc = DELETE(f'{cseURL}/{crsRN}/notificationSchedule', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderCRScorrectRn(self) -> None: + """ CREATE with correct rn under """ + dct:JSON = { 'm2m:sch' : { + 'rn' : 'notificationSchedule', + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(f'{cseURL}/{crsRN}', ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # DELETE again + r, rsc = DELETE(f'{cseURL}/{crsRN}/notificationSchedule', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderCB(self) -> None: + """ CREATE under CB""" + dct:JSON = { 'm2m:sch' : { + 'rn' : 'schedule', + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(cseURL, ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # DELETE again + r, rsc = DELETE(f'{cseURL}/schedule', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderCBTwiceFail(self) -> None: + """ CREATE under CB twice -> Fail""" + dct:JSON = { 'm2m:sch' : { + 'rn' : 'schedule', + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(cseURL, ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # 
second create + dct = { 'm2m:sch' : { + 'rn' : 'schedule2', + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(cseURL, ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) + + # DELETE again + r, rsc = DELETE(f'{cseURL}/schedule', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_createSCHunderNOD(self) -> None: + """ CREATE under """ + dct:JSON = { 'm2m:sch' : { + 'rn' : 'schedule', + 'se': { 'sce': [ '* * * * * * *' ] } + }} + r, rsc = CREATE(nodURL, ORIGINATOR, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # DELETE again + r, rsc = DELETE(f'{nodURL}/schedule', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_testSCHunderSUBinsideSchedule(self) -> None: + """ CREATE under and receive notification within schedule """ + # create + dct:JSON = { 'm2m:sub' : { + 'rn' : f'{subRN}', + 'enc': { + 'net': [ NotificationEventType.resourceUpdate ] + }, + 'nu': [ NOTIFICATIONSERVER ] + }} + r, rsc = CREATE(aeURL, TestSCH.originator, T.SUB, dct) + self.assertEqual(rsc, RC.CREATED, r) + + dct = { 'm2m:sch' : { + 'rn' : 'notificationSchedule', + 'se': { 'sce': [ createScheduleString(requestCheckDelay * 2) ] } + }} + r, rsc = CREATE(f'{aeURL}/{subRN}', TestSCH.originator, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # Update the AE to trigger a notification immediately + clearLastNotification() + dct = { 'm2m:ae' : { + 'lbl' : ['test'] + }} + r, rsc = UPDATE(aeURL, TestSCH.originator, dct) + self.assertEqual(rsc, RC.UPDATED, r) + + # Check notification + testSleep(requestCheckDelay) + notification = getLastNotification() + self.assertIsNotNone(notification) # notification received + + # DELETE again + r, rsc = DELETE(f'{aeURL}/{subRN}', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_testSCHunderSUBoutsideSchedule(self) -> None: + """ CREATE under and receive notification outside schedule """ + # create + dct:JSON = { 'm2m:sub' : { + 'rn' : f'{subRN}', + 'enc': { + 'net': [ NotificationEventType.resourceUpdate ] + }, + 'nu': [ NOTIFICATIONSERVER ] + }} + r, rsc = CREATE(aeURL, TestSCH.originator, T.SUB, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # add schedule + dct = { 'm2m:sch' : { + 'rn' : 'notificationSchedule', + 'se': { 'sce': [ createScheduleString(requestCheckDelay * 2, requestCheckDelay * 2) ] } + }} + r, rsc = CREATE(f'{aeURL}/{subRN}', TestSCH.originator, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # Update the AE to trigger a notification immediately + clearLastNotification() + dct = { 'm2m:ae' : { + 'lbl' : ['test'] + }} + r, rsc = UPDATE(aeURL, TestSCH.originator, dct) + self.assertEqual(rsc, RC.UPDATED, r) + + # Check notification + testSleep(requestCheckDelay) # wait a short time but run before the schedule starts + notification = getLastNotification() + self.assertIsNone(notification) # notification received + + # DELETE again + r, rsc = DELETE(f'{aeURL}/{subRN}', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_testSCHunderSUBoutsideScheduleImmediate(self) -> None: + """ CREATE under and receive notification outside schedule, nec = immediate """ + # create + dct:JSON = { 'm2m:sub' : { + 'rn' : f'{subRN}', + 'enc': { + 'net': [ NotificationEventType.resourceUpdate ], + }, + 'nec': 2, # immediate notification + 'nu': [ NOTIFICATIONSERVER ] + }} + r, rsc = CREATE(aeURL, TestSCH.originator, T.SUB, dct) + 
self.assertEqual(rsc, RC.CREATED, r) + + # Add schedule + dct = { 'm2m:sch' : { + 'rn' : 'notificationSchedule', + 'se': { 'sce': [ createScheduleString(requestCheckDelay * 2, requestCheckDelay * 2) ] } + }} + r, rsc = CREATE(f'{aeURL}/{subRN}', TestSCH.originator, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # Update the AE to trigger a notification immediately + clearLastNotification() + dct = { 'm2m:ae' : { + 'lbl' : ['test'] + }} + r, rsc = UPDATE(aeURL, TestSCH.originator, dct) + self.assertEqual(rsc, RC.UPDATED, r) + + # Check notification + testSleep(requestCheckDelay) # wait a short time but run before the schedule starts + notification = getLastNotification() + self.assertIsNotNone(notification) # notification received + + # DELETE again + r, rsc = DELETE(f'{aeURL}/{subRN}', ORIGINATOR) + self.assertEqual(rsc, RC.DELETED, r) + + + # + # Testing crossResourceSubscription with schedule + # + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_testSCHunderCRSinsideSchedule(self) -> None: + """ CREATE under and receive notification within schedule """ + # create + dct = { 'm2m:crs' : { + 'rn' : crsRN, + 'nu' : [ NOTIFICATIONSERVER ], + 'twt': 1, + 'eem': 1, # all events present + 'tws' : f'PT{requestCheckDelay}S', + 'rrat': [ self.aeRI], + 'encs': { + 'enc' : [ + { + 'net': [ NET.resourceUpdate ], + } + ] + } + }} + r, rsc = CREATE(aeURL, TestSCH.originator, T.CRS, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # Add schedule + dct = { 'm2m:sch' : { + 'rn' : 'notificationSchedule', + 'se': { 'sce': [ createScheduleString(requestCheckDelay * 2) ] } + }} + r, rsc = CREATE(f'{aeURL}/{crsRN}', TestSCH.originator, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # # Update the AE to trigger a notification immediately + clearLastNotification() + dct = { 'm2m:ae' : { + 'lbl' : ['test'] + }} + r, rsc = UPDATE(aeURL, TestSCH.originator, dct) + self.assertEqual(rsc, RC.UPDATED, r) + + # # Check notification + testSleep(requestCheckDelay * 2) + notification = getLastNotification() + self.assertIsNotNone(notification) # notification received + + # DELETE again + r, rsc = DELETE(f'{aeURL}/{crsRN}', TestSCH.originator) + self.assertEqual(rsc, RC.DELETED, r) + + + @unittest.skipIf(noCSE, 'No CSEBase') + def test_testSCHunderCRSoutsideScheduleFail(self) -> None: + """ CREATE under and receive notification outside schedule -> Fail """ + # create + dct = { 'm2m:crs' : { + 'rn' : crsRN, + 'nu' : [ NOTIFICATIONSERVER ], + 'twt': 1, + 'eem': 1, # all events present + 'tws' : f'PT{requestCheckDelay}S', + 'rrat': [ self.aeRI], + 'encs': { + 'enc' : [ + { + 'net': [ NET.resourceUpdate ], + } + ] + } + }} + r, rsc = CREATE(aeURL, TestSCH.originator, T.CRS, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # Add schedule + dct = { 'm2m:sch' : { + 'rn' : 'notificationSchedule', + 'se': { 'sce': [ createScheduleString(requestCheckDelay * 2, requestCheckDelay * 4) ] } # outside time window + }} + r, rsc = CREATE(f'{aeURL}/{crsRN}', TestSCH.originator, T.SCH, dct) + self.assertEqual(rsc, RC.CREATED, r) + + # # Update the AE to trigger a notification immediately, but outside schedule + clearLastNotification() + dct = { 'm2m:ae' : { + 'lbl' : ['test'] + }} + r, rsc = UPDATE(aeURL, TestSCH.originator, dct) + self.assertEqual(rsc, RC.UPDATED, r) + + # # Check notification + testSleep(requestCheckDelay * 2) + notification = getLastNotification() + self.assertIsNone(notification) # NO notification received + + # DELETE again + r, rsc = DELETE(f'{aeURL}/{crsRN}', TestSCH.originator) + self.assertEqual(rsc, 
RC.DELETED, r) + + +def run(testFailFast:bool) -> Tuple[int, int, int, float]: + suite = unittest.TestSuite() + + # basic tests + addTest(suite, TestSCH('test_createSCHunderCBwithNOCFail')) + addTest(suite, TestSCH('test_createSCHunderNODwithNOCUnsupportedFail')) + addTest(suite, TestSCH('test_createSCHunderCBwithoutNCO')) + addTest(suite, TestSCH('test_updateSCHunderCBwithNCOFail')) + addTest(suite, TestSCH('test_updateSCHunderNODwithNOCUnsupportedFail')) + + # testing for specific parent types + addTest(suite, TestSCH('test_createSCHunderSUBwrongRn')) + addTest(suite, TestSCH('test_createSCHunderSUBemptyRn')) + addTest(suite, TestSCH('test_createSCHunderSUBcorrectRn')) + addTest(suite, TestSCH('test_createSCHunderCRSwrongRn')) + addTest(suite, TestSCH('test_createSCHunderCRSemptyRn')) + addTest(suite, TestSCH('test_createSCHunderCRScorrectRn')) + addTest(suite, TestSCH('test_createSCHunderCB')) + addTest(suite, TestSCH('test_createSCHunderCBTwiceFail')) + addTest(suite, TestSCH('test_createSCHunderNOD')) + + # testing subscriptions with schedule + addTest(suite, TestSCH('test_testSCHunderSUBinsideSchedule')) + addTest(suite, TestSCH('test_testSCHunderSUBoutsideSchedule')) + addTest(suite, TestSCH('test_testSCHunderSUBoutsideScheduleImmediate')) + + # testing crossResourceSubscription with schedule + addTest(suite, TestSCH('test_testSCHunderCRSinsideSchedule')) + addTest(suite, TestSCH('test_testSCHunderCRSoutsideScheduleFail')) + + result = unittest.TextTestRunner(verbosity = testVerbosity, failfast = testFailFast).run(suite) + return result.testsRun, len(result.errors + result.failures), len(result.skipped), getSleepTimeCount() + + +if __name__ == '__main__': + r, errors, s, t = run(True) + sys.exit(errors) \ No newline at end of file diff --git a/tests/testSUB.py b/tests/testSUB.py index a5fddcb3..72d349a6 100644 --- a/tests/testSUB.py +++ b/tests/testSUB.py @@ -313,8 +313,8 @@ def test_deleteSUBByUnknownOriginator(self) -> None: @unittest.skipIf(noCSE, 'No CSEBase') def test_deleteSUBByAssignedOriginator(self) -> None: """ DELETE with correct originator -> Succeed. Send deletion notification. """ - _, rsc = DELETE(subURL, TestSUB.originator) - self.assertEqual(rsc, RC.DELETED) + r, rsc = DELETE(subURL, TestSUB.originator) + self.assertEqual(rsc, RC.DELETED, r) lastNotification = getLastNotification() # no delay! blocking self.assertTrue(findXPath(lastNotification, 'm2m:sgn/sud')) @@ -1441,9 +1441,7 @@ def test_createSUBForNotificationStats(self) -> None: self.assertEqual(rsc, RC.CREATED) self.assertIsNotNone(findXPath(r, 'm2m:sub/nse')) self.assertEqual(findXPath(r, 'm2m:sub/nse'), True) - self.assertIsNotNone(findXPath(r, 'm2m:sub/nsi')) - self.assertIsInstance(findXPath(r, 'm2m:sub/nsi'), list) - self.assertEqual(len(findXPath(r, 'm2m:sub/nsi')), 1) # Verification request doesn't count + self.assertIsNone(findXPath(r, 'm2m:sub/nsi')) lastNotification = getLastNotification() # no delay! 
blocking self.assertTrue(findXPath(lastNotification, 'm2m:sgn/vrq')) @@ -1476,6 +1474,7 @@ def test_updateSUBNSEFalse(self) -> None: }} r, rsc = UPDATE(f'{self.aePOAURL}/{subRN}', TestSUB.originatorPoa, dct) self.assertEqual(rsc, RC.UPDATED, r) + # nsi is kept after nse update to FALSE self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rqs'), 1, r) # Change counts if order of TC changes self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rsr'), 1, r) self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/noec'), 1, r) @@ -1489,11 +1488,8 @@ def test_updateSUBNSETrue(self) -> None: }} r, rsc = UPDATE(f'{self.aePOAURL}/{subRN}', TestSUB.originatorPoa, dct) self.assertEqual(rsc, RC.UPDATED, r) - self.assertEqual(len(findXPath(r, 'm2m:sub/nsi')), 1, r) - # Must be empty - self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rqs'), 0, r) - self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rsr'), 0, r) - self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/noec'), 0, r) + # nsi must be empty + self.assertIsNone(findXPath(r, 'm2m:sub/nsi'), r) @unittest.skipIf(noCSE, 'No CSEBase') @@ -1522,10 +1518,8 @@ def test_updateSUBNSETrueAgain(self) -> None: }} r, rsc = UPDATE(f'{self.aePOAURL}/{subRN}', TestSUB.originatorPoa, dct) self.assertEqual(rsc, RC.UPDATED, r) - self.assertEqual(len(findXPath(r, 'm2m:sub/nsi')), 1, r) - self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rqs'), 0, r) # Change counts if order of TC changes - self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rsr'), 0, r) - self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/noec'), 0, r) + # nsi must be empty + self.assertIsNone(findXPath(r, 'm2m:sub/nsi'), r) @unittest.skipIf(noCSE, 'No CSEBase') @@ -1542,10 +1536,9 @@ def test_updateSUBcountBatchNotifications(self) -> None: r, rsc = UPDATE(f'{self.aePOAURL}/{subRN}', TestSUB.originatorPoa, dct) self.assertEqual(rsc, RC.UPDATED, r) self.assertIsNotNone(findXPath(r, 'm2m:sub/bn/num'), r) - self.assertEqual(len(findXPath(r, 'm2m:sub/nsi')), 1, r) # - self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rqs'), 0, r) - self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rsr'), 0, r) - self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/noec'), 0, r) + # nsi must be empty + self.assertIsNone(findXPath(r, 'm2m:sub/nsi'), r) + # Make some Updates and cause a batch notification for _ in range(numberOfBatchNotifications): @@ -1564,6 +1557,8 @@ def test_updateSUBcountBatchNotifications(self) -> None: # retrieve to get the stats r, rsc = RETRIEVE(f'{self.aePOAURL}/{subRN}', TestSUB.originatorPoa) self.assertEqual(rsc, RC.OK, r) + # nsi must now be set + self.assertIsNotNone(findXPath(r, 'm2m:sub/nsi'), r) self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rqs'), 5, r) # Change counts if order of TC changes self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/rsr'), 5, r) self.assertEqual(findXPath(r, 'm2m:sub/nsi/{0}/noec'), 5, r) @@ -1596,8 +1591,8 @@ def test_createSUBnoNCTwrongNETFail(self) -> None: 'su': NOTIFICATIONSERVER, 'nse': True }} - r, rsc = CREATE(self.aePOAURL, TestSUB.originatorPoa, T.SUB, dct) - self.assertEqual(rsc, RC.BAD_REQUEST) + r, rsc = CREATE(aeURL, TestSUB.originator, T.SUB, dct) + self.assertEqual(rsc, RC.BAD_REQUEST, r) # diff --git a/tools/apidocs/README.md b/tools/apidocs/README.md new file mode 100644 index 00000000..0897a02d --- /dev/null +++ b/tools/apidocs/README.md @@ -0,0 +1,37 @@ +# Generate ACME API Documentation + +This document provides instructions how to generate API documentation for +the ACME CSE implementation. 
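Taken together, the steps below amount to: install the tools, run pydoctor (which reads its settings from *pydoctor.ini*), and optionally feed the generated HTML to doc2dash. A minimal Python sketch of that flow, assuming it is executed from within *tools/apidocs* and that the HTML output lands in `../../docs/apidocs` as configured in *pydoctor.ini*:

```python
import subprocess

# Install the documentation tools (doc2dash is only needed for the Dash docset).
subprocess.run(['pip3', 'install', 'pydoctor', 'doc2dash'], check=True)

# Generate the API documentation; pydoctor picks up its configuration
# from the pydoctor.ini file in this directory.
subprocess.run(['pydoctor'], check=True)

# Optionally build a Dash docset from the generated HTML and add it to Dash.
subprocess.run(['doc2dash', '../../docs/apidocs', '-a', '-f', '-n', 'ACME oneM2M CSE'], check=True)
```

The individual commands are listed in the sections that follow.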
+ + +## Installation + +Install the following packages via pip: + +- To generate only the API documentation: + + pip3 install pydoctor + +- To generate additionally a [Dash][1] docset: + + pip3 install doc2dash + +## Generate the API Documentation and Docset + +Run the following commands from within the *tools/apidocs* directory: + +- To generate the API documentation in the sub-directory `apidocs`. + + pydoctor + + Configuration and command arguments are read from the *pydoctor.ini* configuration file in the same directory. + +- To generate a [Dash][1] docset and automatically add it to Dash: + + doc2dash ../../docs/apidocs -a -f -n "ACME oneM2M CSE" + + + + +[1]: https://kapeli.com/dash + diff --git a/tools/apidocs/pydoctor.ini b/tools/apidocs/pydoctor.ini new file mode 100644 index 00000000..b665c3bd --- /dev/null +++ b/tools/apidocs/pydoctor.ini @@ -0,0 +1,9 @@ +[pydoctor] +add-package = ../../acme +project-base-dir = ../.. +project-name = ACME oneM2M CSE +project-version = 0.13.0-dev +project-url = https://github.com/ankraft/ACME-oneM2M-CSE +docformat = google +theme = readthedocs +html-output = ../../docs/apidocs diff --git a/tools/apidocs/setup.py b/tools/apidocs/setup.py new file mode 100644 index 00000000..365a284b --- /dev/null +++ b/tools/apidocs/setup.py @@ -0,0 +1,48 @@ +from setuptools import setup + +import pathlib + +# The directory containing this file +HERE = pathlib.Path(__file__).parent + +# The text of the README file +README = (HERE / 'README.md').read_text() + +setup( + name='ACME-oneM2M-CSE', + version='0.13.0', + url='https://github.com/ankraft/ACME-oneM2M-CSE', + author='Andreas Kraft', + author_email='an.kraft@gmail.com', + description='An open source CSE Middleware for Education', + long_description=README, + long_description_content_type='text/markdown', + license='BSD', + classifiers=[ + 'License :: OSI Approved :: BSD License', + 'Programming Language :: Python :: 3.8', + ], + #packages=find_packages(), + packages=[ 'acme' ], + exclude=('tests',), + include_package_data=True, + install_requires=[ + 'cbor2', + 'flask', + 'flask-cors', + 'InquirerPy', + 'isodate', + 'paho-mqtt', + 'plotext', + 'rdflib', + 'requests', + 'rich', + 'tinydb', + #'package1 @ git+https://github.com/CITGuru/PyInquirer.git@9d598a53fd17a9bc42efff33183cd2d141a5c949' + ], + entry_points={ + 'console_scripts': [ + 'acme-cse=acme.__main__:main', + ] + }, +) diff --git a/tools/notificationServer/notificationServer.py b/tools/notificationServer/notificationServer.py index 736b8511..70a76ac2 100644 --- a/tools/notificationServer/notificationServer.py +++ b/tools/notificationServer/notificationServer.py @@ -82,32 +82,29 @@ def do_POST(self) -> None: self.end_headers() - - # Print JSON - if contentType in [ 'application/json', 'application/vnd.onem2m-res+json' ]: - console.print(Syntax(json.dumps(json.loads(post_data.decode('utf-8')), indent=4), - 'json', - theme='monokai', - line_numbers=False)) - - # Print CBOR - elif contentType in [ 'application/cbor', 'application/vnd.onem2m-res+cbor' ]: - console.print('[dim]Content as Hexdump:\n') - console.print(toHex(post_data), highlight=False) - console.print('\n[dim]Content as JSON:\n') - console.print(Syntax(json.dumps(cbor2.loads(post_data), indent=4), - 'json', - theme='monokai', - line_numbers=False)) - - # Print plain text formats - elif contentType in ['text/plain']: - console.print(post_data.decode(), highlight = False) - console.print() - - # Print other binary content - else: - console.print(toHex(post_data), highlight=False) + match 
contentType: + # Print JSON + case 'application/json' | 'application/vnd.onem2m-res+json': + console.print(Syntax(json.dumps(json.loads(post_data.decode('utf-8')), indent=4), + 'json', + theme='monokai', + line_numbers=False)) + # Print CBOR + case 'application/cbor' | 'application/vnd.onem2m-res+cbor': + console.print('[dim]Content as Hexdump:\n') + console.print(toHex(post_data), highlight=False) + console.print('\n[dim]Content as JSON:\n') + console.print(Syntax(json.dumps(cbor2.loads(post_data), indent=4), + 'json', + theme='monokai', + line_numbers=False)) + # Print plain text formats + case 'text/plain': + console.print(post_data.decode(), highlight = False) + console.print() + # Print other binary content + case _: + console.print(toHex(post_data), highlight=False) # Print HTTP Response # This looks a it more complicated but is necessary to render nicely in Jupyter @@ -196,38 +193,35 @@ def _constructResponse(frm:str, to:str, jsn:dict) -> dict: _frm = 'non-onem2m-entity' _to = 'unknown' encoding = 'json' - - # Print JSON + responseData = None - if encoding.upper() == 'JSON': - console.print(Syntax(json.dumps((jsn := json.loads(data)), indent=4), - 'json', - theme='monokai', - line_numbers=False)) - to = jsn['to'] if 'to' in jsn else _to - frm = jsn['fr'] if 'fr' in jsn else _frm - responseData = cast(bytes, serializeData(_constructResponse(to, frm, jsn), ContentSerializationType.JSON)) - console.print(responseData) - - - - # Print CBOR - elif encoding.upper() == 'CBOR': - console.print('[dim]Content as Hexdump:\n') - console.print(toHex(data), highlight=False) - console.print('\n[dim]Content as JSON:\n') - console.print(Syntax(json.dumps((jsn := cbor2.loads(data)), indent=4), - 'json', - theme='monokai', - line_numbers=False)) - to = jsn['to'] if 'to' in jsn else to - frm = jsn['fr'] if 'fr' in jsn else frm - responseData = cast(bytes, serializeData(_constructResponse(to, frm, jsn), ContentSerializationType.CBOR)) - - # Print other binary content - else: - console.print('[dim]Content as Hexdump:\n') - console.print(toHex(data), highlight=False) + match encoding.upper(): + # Print JSON + case 'JSON': + console.print(Syntax(json.dumps((jsn := json.loads(data)), indent=4), + 'json', + theme='monokai', + line_numbers=False)) + to = jsn['to'] if 'to' in jsn else _to + frm = jsn['fr'] if 'fr' in jsn else _frm + responseData = cast(bytes, serializeData(_constructResponse(to, frm, jsn), ContentSerializationType.JSON)) + console.print(responseData) + # Print CBOR + case 'CBOR': + console.print('[dim]Content as Hexdump:\n') + console.print(toHex(data), highlight=False) + console.print('\n[dim]Content as JSON:\n') + console.print(Syntax(json.dumps((jsn := cbor2.loads(data)), indent=4), + 'json', + theme='monokai', + line_numbers=False)) + to = jsn['to'] if 'to' in jsn else to + frm = jsn['fr'] if 'fr' in jsn else frm + responseData = cast(bytes, serializeData(_constructResponse(to, frm, jsn), ContentSerializationType.CBOR)) + # Print other binary content + case _: + console.print('[dim]Content as Hexdump:\n') + console.print(toHex(data), highlight=False) # TODO send a response if responseData: