mirror of
https://gitea.osmocom.org/sim-card/pysim.git
synced 2026-05-03 06:58:53 +03:00
Compare commits
13 Commits
neels/suci
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
593bfa0911 | ||
|
|
8fa7727a14 | ||
|
|
f1609424de | ||
|
|
1167b65e2a | ||
|
|
cd4b01f67e | ||
|
|
393de033d3 | ||
|
|
5f1c7d603c | ||
|
|
d7072e9263 | ||
|
|
ac593bb14d | ||
|
|
a95622a022 | ||
|
|
03b58985a5 | ||
|
|
cc71dbf899 | ||
|
|
aafc8d51c3 |
@@ -10,6 +10,11 @@
|
||||
|
||||
export PYTHONUNBUFFERED=1
|
||||
|
||||
setup_venv() {
|
||||
virtualenv -p python3 venv --system-site-packages
|
||||
. venv/bin/activate
|
||||
}
|
||||
|
||||
if [ ! -d "./tests/" ] ; then
|
||||
echo "###############################################"
|
||||
echo "Please call from pySim-prog top directory"
|
||||
@@ -23,8 +28,7 @@ fi
|
||||
|
||||
case "$JOB_TYPE" in
|
||||
"test")
|
||||
virtualenv -p python3 venv --system-site-packages
|
||||
. venv/bin/activate
|
||||
setup_venv
|
||||
|
||||
pip install -r requirements.txt
|
||||
pip install pyshark
|
||||
@@ -32,23 +36,27 @@ case "$JOB_TYPE" in
|
||||
# Execute automatically discovered unit tests first
|
||||
python -m unittest discover -v -s tests/unittests
|
||||
|
||||
# Run pySim-prog integration tests (requires physical cards)
|
||||
cd tests/pySim-prog_test/
|
||||
./pySim-prog_test.sh
|
||||
cd ../../
|
||||
|
||||
# Run pySim-trace test
|
||||
tests/pySim-trace_test/pySim-trace_test.sh
|
||||
;;
|
||||
"card-test") # tests requiring physical cards
|
||||
setup_venv
|
||||
|
||||
# Run pySim-shell integration tests (requires physical cards)
|
||||
pip install -r requirements.txt
|
||||
|
||||
# Run pySim-prog integration tests
|
||||
cd tests/pySim-prog_test/
|
||||
./pySim-prog_test.sh
|
||||
cd ../../
|
||||
|
||||
# Run pySim-shell integration tests
|
||||
python3 -m unittest discover -v -s ./tests/pySim-shell_test/
|
||||
|
||||
# Run pySim-smpp2sim test
|
||||
tests/pySim-smpp2sim_test/pySim-smpp2sim_test.sh
|
||||
;;
|
||||
"distcheck")
|
||||
virtualenv -p python3 venv --system-site-packages
|
||||
. venv/bin/activate
|
||||
setup_venv
|
||||
|
||||
pip install .
|
||||
pip install pyshark
|
||||
@@ -61,8 +69,7 @@ case "$JOB_TYPE" in
|
||||
# Print pylint version
|
||||
pip3 freeze | grep pylint
|
||||
|
||||
virtualenv -p python3 venv --system-site-packages
|
||||
. venv/bin/activate
|
||||
setup_venv
|
||||
|
||||
pip install .
|
||||
|
||||
@@ -80,8 +87,7 @@ case "$JOB_TYPE" in
|
||||
contrib/*.py
|
||||
;;
|
||||
"docs")
|
||||
virtualenv -p python3 venv --system-site-packages
|
||||
. venv/bin/activate
|
||||
setup_venv
|
||||
|
||||
pip install -r requirements.txt
|
||||
|
||||
|
||||
@@ -16,6 +16,12 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import requests
|
||||
from klein import Klein
|
||||
from twisted.internet import defer, protocol, ssl, task, endpoints, reactor
|
||||
from twisted.internet.posixbase import PosixReactorBase
|
||||
from pathlib import Path
|
||||
from twisted.web.server import Site, Request
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
import time
|
||||
@@ -123,10 +129,12 @@ class Es2PlusApiFunction(JsonHttpApiFunction):
|
||||
class DownloadOrder(Es2PlusApiFunction):
|
||||
path = '/gsma/rsp2/es2plus/downloadOrder'
|
||||
input_params = {
|
||||
'header': JsonRequestHeader,
|
||||
'eid': param.Eid,
|
||||
'iccid': param.Iccid,
|
||||
'profileType': param.ProfileType
|
||||
}
|
||||
input_mandatory = ['header']
|
||||
output_params = {
|
||||
'header': JsonResponseHeader,
|
||||
'iccid': param.Iccid,
|
||||
@@ -137,6 +145,7 @@ class DownloadOrder(Es2PlusApiFunction):
|
||||
class ConfirmOrder(Es2PlusApiFunction):
|
||||
path = '/gsma/rsp2/es2plus/confirmOrder'
|
||||
input_params = {
|
||||
'header': JsonRequestHeader,
|
||||
'iccid': param.Iccid,
|
||||
'eid': param.Eid,
|
||||
'matchingId': param.MatchingId,
|
||||
@@ -144,7 +153,7 @@ class ConfirmOrder(Es2PlusApiFunction):
|
||||
'smdsAddress': param.SmdsAddress,
|
||||
'releaseFlag': param.ReleaseFlag,
|
||||
}
|
||||
input_mandatory = ['iccid', 'releaseFlag']
|
||||
input_mandatory = ['header', 'iccid', 'releaseFlag']
|
||||
output_params = {
|
||||
'header': JsonResponseHeader,
|
||||
'eid': param.Eid,
|
||||
@@ -157,12 +166,13 @@ class ConfirmOrder(Es2PlusApiFunction):
|
||||
class CancelOrder(Es2PlusApiFunction):
|
||||
path = '/gsma/rsp2/es2plus/cancelOrder'
|
||||
input_params = {
|
||||
'header': JsonRequestHeader,
|
||||
'iccid': param.Iccid,
|
||||
'eid': param.Eid,
|
||||
'matchingId': param.MatchingId,
|
||||
'finalProfileStatusIndicator': param.FinalProfileStatusIndicator,
|
||||
}
|
||||
input_mandatory = ['finalProfileStatusIndicator', 'iccid']
|
||||
input_mandatory = ['header', 'finalProfileStatusIndicator', 'iccid']
|
||||
output_params = {
|
||||
'header': JsonResponseHeader,
|
||||
}
|
||||
@@ -172,9 +182,10 @@ class CancelOrder(Es2PlusApiFunction):
|
||||
class ReleaseProfile(Es2PlusApiFunction):
|
||||
path = '/gsma/rsp2/es2plus/releaseProfile'
|
||||
input_params = {
|
||||
'header': JsonRequestHeader,
|
||||
'iccid': param.Iccid,
|
||||
}
|
||||
input_mandatory = ['iccid']
|
||||
input_mandatory = ['header', 'iccid']
|
||||
output_params = {
|
||||
'header': JsonResponseHeader,
|
||||
}
|
||||
@@ -184,6 +195,7 @@ class ReleaseProfile(Es2PlusApiFunction):
|
||||
class HandleDownloadProgressInfo(Es2PlusApiFunction):
|
||||
path = '/gsma/rsp2/es2plus/handleDownloadProgressInfo'
|
||||
input_params = {
|
||||
'header': JsonRequestHeader,
|
||||
'eid': param.Eid,
|
||||
'iccid': param.Iccid,
|
||||
'profileType': param.ProfileType,
|
||||
@@ -192,10 +204,9 @@ class HandleDownloadProgressInfo(Es2PlusApiFunction):
|
||||
'notificationPointStatus': param.NotificationPointStatus,
|
||||
'resultData': param.ResultData,
|
||||
}
|
||||
input_mandatory = ['iccid', 'profileType', 'timestamp', 'notificationPointId', 'notificationPointStatus']
|
||||
input_mandatory = ['header', 'iccid', 'profileType', 'timestamp', 'notificationPointId', 'notificationPointStatus']
|
||||
expected_http_status = 204
|
||||
|
||||
|
||||
class Es2pApiClient:
|
||||
"""Main class representing a full ES2+ API client. Has one method for each API function."""
|
||||
def __init__(self, url_prefix:str, func_req_id:str, server_cert_verify: str = None, client_cert: str = None):
|
||||
@@ -206,18 +217,17 @@ class Es2pApiClient:
|
||||
if client_cert:
|
||||
self.session.cert = client_cert
|
||||
|
||||
self.downloadOrder = DownloadOrder(url_prefix, func_req_id, self.session)
|
||||
self.confirmOrder = ConfirmOrder(url_prefix, func_req_id, self.session)
|
||||
self.cancelOrder = CancelOrder(url_prefix, func_req_id, self.session)
|
||||
self.releaseProfile = ReleaseProfile(url_prefix, func_req_id, self.session)
|
||||
self.handleDownloadProgressInfo = HandleDownloadProgressInfo(url_prefix, func_req_id, self.session)
|
||||
self.downloadOrder = JsonHttpApiClient(DownloadOrder(), url_prefix, func_req_id, self.session)
|
||||
self.confirmOrder = JsonHttpApiClient(ConfirmOrder(), url_prefix, func_req_id, self.session)
|
||||
self.cancelOrder = JsonHttpApiClient(CancelOrder(), url_prefix, func_req_id, self.session)
|
||||
self.releaseProfile = JsonHttpApiClient(ReleaseProfile(), url_prefix, func_req_id, self.session)
|
||||
self.handleDownloadProgressInfo = JsonHttpApiClient(HandleDownloadProgressInfo(), url_prefix, func_req_id, self.session)
|
||||
|
||||
def _gen_func_id(self) -> str:
|
||||
"""Generate the next function call id."""
|
||||
self.func_id += 1
|
||||
return 'FCI-%u-%u' % (time.time(), self.func_id)
|
||||
|
||||
|
||||
def call_downloadOrder(self, data: dict) -> dict:
|
||||
"""Perform ES2+ DownloadOrder function (SGP.22 section 5.3.1)."""
|
||||
return self.downloadOrder.call(data, self._gen_func_id())
|
||||
@@ -237,3 +247,116 @@ class Es2pApiClient:
|
||||
def call_handleDownloadProgressInfo(self, data: dict) -> dict:
|
||||
"""Perform ES2+ HandleDownloadProgressInfo function (SGP.22 section 5.3.5)."""
|
||||
return self.handleDownloadProgressInfo.call(data, self._gen_func_id())
|
||||
|
||||
class Es2pApiServerHandlerSmdpp(abc.ABC):
|
||||
"""ES2+ (SMDP+ side) API Server handler class. The API user is expected to override the contained methods."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def call_downloadOrder(self, data: dict) -> (dict, str):
|
||||
"""Perform ES2+ DownloadOrder function (SGP.22 section 5.3.1)."""
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def call_confirmOrder(self, data: dict) -> (dict, str):
|
||||
"""Perform ES2+ ConfirmOrder function (SGP.22 section 5.3.2)."""
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def call_cancelOrder(self, data: dict) -> (dict, str):
|
||||
"""Perform ES2+ CancelOrder function (SGP.22 section 5.3.3)."""
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def call_releaseProfile(self, data: dict) -> (dict, str):
|
||||
"""Perform ES2+ CancelOrder function (SGP.22 section 5.3.4)."""
|
||||
pass
|
||||
|
||||
class Es2pApiServerHandlerMno(abc.ABC):
|
||||
"""ES2+ (MNO side) API Server handler class. The API user is expected to override the contained methods."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def call_handleDownloadProgressInfo(self, data: dict) -> (dict, str):
|
||||
"""Perform ES2+ HandleDownloadProgressInfo function (SGP.22 section 5.3.5)."""
|
||||
pass
|
||||
|
||||
class Es2pApiServer(abc.ABC):
|
||||
"""Main class representing a full ES2+ API server. Has one method for each API function."""
|
||||
app = None
|
||||
|
||||
def __init__(self, port: int, interface: str, server_cert: str = None, client_cert_verify: str = None):
|
||||
logger.debug("HTTP SRV: starting ES2+ API server on %s:%s" % (interface, port))
|
||||
self.port = port
|
||||
self.interface = interface
|
||||
if server_cert:
|
||||
self.server_cert = ssl.PrivateCertificate.loadPEM(Path(server_cert).read_text())
|
||||
else:
|
||||
self.server_cert = None
|
||||
if client_cert_verify:
|
||||
self.client_cert_verify = ssl.Certificate.loadPEM(Path(client_cert_verify).read_text())
|
||||
else:
|
||||
self.client_cert_verify = None
|
||||
|
||||
def reactor(self, reactor: PosixReactorBase):
|
||||
logger.debug("HTTP SRV: listen on %s:%s" % (self.interface, self.port))
|
||||
if self.server_cert:
|
||||
if self.client_cert_verify:
|
||||
reactor.listenSSL(self.port, Site(self.app.resource()), self.server_cert.options(self.client_cert_verify),
|
||||
interface=self.interface)
|
||||
else:
|
||||
reactor.listenSSL(self.port, Site(self.app.resource()), self.server_cert.options(),
|
||||
interface=self.interface)
|
||||
else:
|
||||
reactor.listenTCP(self.port, Site(self.app.resource()), interface=self.interface)
|
||||
return defer.Deferred()
|
||||
|
||||
class Es2pApiServerSmdpp(Es2pApiServer):
|
||||
"""ES2+ (SMDP+ side) API Server."""
|
||||
app = Klein()
|
||||
|
||||
def __init__(self, port: int, interface: str, handler: Es2pApiServerHandlerSmdpp,
|
||||
server_cert: str = None, client_cert_verify: str = None):
|
||||
super().__init__(port, interface, server_cert, client_cert_verify)
|
||||
self.handler = handler
|
||||
self.downloadOrder = JsonHttpApiServer(DownloadOrder(), handler.call_downloadOrder)
|
||||
self.confirmOrder = JsonHttpApiServer(ConfirmOrder(), handler.call_confirmOrder)
|
||||
self.cancelOrder = JsonHttpApiServer(CancelOrder(), handler.call_cancelOrder)
|
||||
self.releaseProfile = JsonHttpApiServer(ReleaseProfile(), handler.call_releaseProfile)
|
||||
task.react(self.reactor)
|
||||
|
||||
@app.route(DownloadOrder.path)
|
||||
def call_downloadOrder(self, request: Request) -> dict:
|
||||
"""Perform ES2+ DownloadOrder function (SGP.22 section 5.3.1)."""
|
||||
return self.downloadOrder.call(request)
|
||||
|
||||
@app.route(ConfirmOrder.path)
|
||||
def call_confirmOrder(self, request: Request) -> dict:
|
||||
"""Perform ES2+ ConfirmOrder function (SGP.22 section 5.3.2)."""
|
||||
return self.confirmOrder.call(request)
|
||||
|
||||
@app.route(CancelOrder.path)
|
||||
def call_cancelOrder(self, request: Request) -> dict:
|
||||
"""Perform ES2+ CancelOrder function (SGP.22 section 5.3.3)."""
|
||||
return self.cancelOrder.call(request)
|
||||
|
||||
@app.route(ReleaseProfile.path)
|
||||
def call_releaseProfile(self, request: Request) -> dict:
|
||||
"""Perform ES2+ CancelOrder function (SGP.22 section 5.3.4)."""
|
||||
return self.releaseProfile.call(request)
|
||||
|
||||
class Es2pApiServerMno(Es2pApiServer):
|
||||
"""ES2+ (MNO side) API Server."""
|
||||
|
||||
app = Klein()
|
||||
|
||||
def __init__(self, port: int, interface: str, handler: Es2pApiServerHandlerMno,
|
||||
server_cert: str = None, client_cert_verify: str = None):
|
||||
super().__init__(port, interface, server_cert, client_cert_verify)
|
||||
self.handler = handler
|
||||
self.handleDownloadProgressInfo = JsonHttpApiServer(HandleDownloadProgressInfo(),
|
||||
handler.call_handleDownloadProgressInfo)
|
||||
task.react(self.reactor)
|
||||
|
||||
@app.route(HandleDownloadProgressInfo.path)
|
||||
def call_handleDownloadProgressInfo(self, request: Request) -> dict:
|
||||
"""Perform ES2+ HandleDownloadProgressInfo function (SGP.22 section 5.3.5)."""
|
||||
return self.handleDownloadProgressInfo.call(request)
|
||||
|
||||
@@ -155,11 +155,11 @@ class Es9pApiClient:
|
||||
if server_cert_verify:
|
||||
self.session.verify = server_cert_verify
|
||||
|
||||
self.initiateAuthentication = InitiateAuthentication(url_prefix, '', self.session)
|
||||
self.authenticateClient = AuthenticateClient(url_prefix, '', self.session)
|
||||
self.getBoundProfilePackage = GetBoundProfilePackage(url_prefix, '', self.session)
|
||||
self.handleNotification = HandleNotification(url_prefix, '', self.session)
|
||||
self.cancelSession = CancelSession(url_prefix, '', self.session)
|
||||
self.initiateAuthentication = JsonHttpApiClient(InitiateAuthentication(), url_prefix, '', self.session)
|
||||
self.authenticateClient = JsonHttpApiClient(AuthenticateClient(), url_prefix, '', self.session)
|
||||
self.getBoundProfilePackage = JsonHttpApiClient(GetBoundProfilePackage(), url_prefix, '', self.session)
|
||||
self.handleNotification = JsonHttpApiClient(HandleNotification(), url_prefix, '', self.session)
|
||||
self.cancelSession = JsonHttpApiClient(CancelSession(), url_prefix, '', self.session)
|
||||
|
||||
def call_initiateAuthentication(self, data: dict) -> dict:
|
||||
return self.initiateAuthentication.call(data)
|
||||
|
||||
@@ -19,8 +19,10 @@ import abc
|
||||
import requests
|
||||
import logging
|
||||
import json
|
||||
from typing import Optional
|
||||
from typing import Optional, Tuple
|
||||
import base64
|
||||
from twisted.web.server import Request
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.setLevel(logging.DEBUG)
|
||||
@@ -131,6 +133,16 @@ class JsonResponseHeader(ApiParam):
|
||||
if status not in ['Executed-Success', 'Executed-WithWarning', 'Failed', 'Expired']:
|
||||
raise ValueError('Unknown/unspecified status "%s"' % status)
|
||||
|
||||
class JsonRequestHeader(ApiParam):
|
||||
"""SGP.22 section 6.5.1.3."""
|
||||
@classmethod
|
||||
def verify_decoded(cls, data):
|
||||
func_req_id = data.get('functionRequesterIdentifier')
|
||||
if not func_req_id:
|
||||
raise ValueError('Missing mandatory functionRequesterIdentifier in header')
|
||||
func_call_id = data.get('functionCallIdentifier')
|
||||
if not func_call_id:
|
||||
raise ValueError('Missing mandatory functionCallIdentifier in header')
|
||||
|
||||
class HttpStatusError(Exception):
|
||||
pass
|
||||
@@ -161,65 +173,118 @@ class ApiError(Exception):
|
||||
|
||||
class JsonHttpApiFunction(abc.ABC):
|
||||
"""Base class for representing an HTTP[s] API Function."""
|
||||
# the below class variables are expected to be overridden in derived classes
|
||||
# The below class variables are used to describe the properties of the API function. Derived classes are expected
|
||||
# to orverride those class properties with useful values. The prefixes "input_" and "output_" refer to the API
|
||||
# function from an abstract point of view. Seen from the client perspective, "input_" will refer to parameters the
|
||||
# client sends to a HTTP server. Seen from the server perspective, "input_" will refer to parameters the server
|
||||
# receives from the a requesting client. The same applies vice versa to class variables that have an "output_"
|
||||
# prefix.
|
||||
|
||||
# path of the API function (e.g. '/gsma/rsp2/es2plus/confirmOrder', see also method rewrite_url).
|
||||
path = None
|
||||
|
||||
# dictionary of input parameters. key is parameter name, value is ApiParam class
|
||||
input_params = {}
|
||||
|
||||
# list of mandatory input parameters
|
||||
input_mandatory = []
|
||||
|
||||
# dictionary of output parameters. key is parameter name, value is ApiParam class
|
||||
output_params = {}
|
||||
|
||||
# list of mandatory output parameters (for successful response)
|
||||
output_mandatory = []
|
||||
|
||||
# list of mandatory output parameters (for failed response)
|
||||
output_mandatory_failed = []
|
||||
|
||||
# expected HTTP status code of the response
|
||||
expected_http_status = 200
|
||||
|
||||
# the HTTP method used (GET, OPTIONS, HEAD, POST, PUT, PATCH or DELETE)
|
||||
http_method = 'POST'
|
||||
|
||||
# additional custom HTTP headers (client requests)
|
||||
extra_http_req_headers = {}
|
||||
|
||||
def __init__(self, url_prefix: str, func_req_id: Optional[str], session: requests.Session):
|
||||
self.url_prefix = url_prefix
|
||||
self.func_req_id = func_req_id
|
||||
self.session = session
|
||||
# additional custom HTTP headers (server responses)
|
||||
extra_http_res_headers = {}
|
||||
|
||||
def encode(self, data: dict, func_call_id: Optional[str] = None) -> dict:
|
||||
def __new__(cls, *args, role = 'legacy_client', **kwargs):
|
||||
"""
|
||||
Args:
|
||||
args: (see JsonHttpApiClient and JsonHttpApiServer)
|
||||
role: role ('server' or 'client') in which the JsonHttpApiFunction should be created.
|
||||
kwargs: (see JsonHttpApiClient and JsonHttpApiServer)
|
||||
"""
|
||||
|
||||
# Create a dictionary with the class attributes of this class (the properties listed above and the encode_
|
||||
# decode_ methods below). The dictionary will not include any dunder/magic methods
|
||||
cls_attr = {attr_name: getattr(cls, attr_name) for attr_name in dir(cls) if not attr_name.startswith('__')}
|
||||
|
||||
# Normal instantiation as JsonHttpApiFunction:
|
||||
if len(args) == 0 and len(kwargs) == 0:
|
||||
return type(cls.__name__, (abc.ABC,), cls_attr)()
|
||||
|
||||
# Instantiation as as JsonHttpApiFunction with a JsonHttpApiClient or JsonHttpApiServer base
|
||||
if role == 'legacy_client':
|
||||
# Deprecated: With the advent of the server role (JsonHttpApiServer) the API had to be changed. To maintain
|
||||
# compatibility with existing code (out-of-tree) the original behaviour and API interface and behaviour had
|
||||
# to be preserved. Already existing JsonHttpApiFunction definitions will still work and the related objects
|
||||
# may still be created on the original way: my_api_func = MyApiFunc(url_prefix, func_req_id, self.session)
|
||||
logger.warning('implicit role (falling back to legacy JsonHttpApiClient) is deprecated, please specify role explcitly')
|
||||
result = type(cls.__name__, (JsonHttpApiClient,), cls_attr)(None, *args, **kwargs)
|
||||
result.api_func = result
|
||||
result.legacy = True
|
||||
return result
|
||||
elif role == 'client':
|
||||
# Create a JsonHttpApiFunction in client role
|
||||
# Example: my_api_func = MyApiFunc(url_prefix, func_req_id, self.session, role='client')
|
||||
result = type(cls.__name__, (JsonHttpApiClient,), cls_attr)(None, *args, **kwargs)
|
||||
result.api_func = result
|
||||
return result
|
||||
elif role == 'server':
|
||||
# Create a JsonHttpApiFunction in server role
|
||||
# Example: my_api_func = MyApiFunc(url_prefix, func_req_id, self.session, role='server')
|
||||
result = type(cls.__name__, (JsonHttpApiServer,), cls_attr)(None, *args, **kwargs)
|
||||
result.api_func = result
|
||||
return result
|
||||
else:
|
||||
raise ValueError('Invalid role \'%s\' specified' % role)
|
||||
|
||||
def encode_client(self, data: dict) -> dict:
|
||||
"""Validate an encode input dict into JSON-serializable dict for request body."""
|
||||
output = {}
|
||||
if func_call_id:
|
||||
output['header'] = {
|
||||
'functionRequesterIdentifier': self.func_req_id,
|
||||
'functionCallIdentifier': func_call_id
|
||||
}
|
||||
|
||||
for p in self.input_mandatory:
|
||||
if not p in data:
|
||||
raise ValueError('Mandatory input parameter %s missing' % p)
|
||||
for p, v in data.items():
|
||||
p_class = self.input_params.get(p)
|
||||
if not p_class:
|
||||
logger.warning('Unexpected/unsupported input parameter %s=%s', p, v)
|
||||
output[p] = v
|
||||
# pySim/esim/http_json_api.py:269:47: E1101: Instance of 'JsonHttpApiFunction' has no 'legacy' member (no-member)
|
||||
# pylint: disable=no-member
|
||||
if hasattr(self, 'legacy') and self.legacy:
|
||||
output[p] = JsonRequestHeader.encode(v)
|
||||
else:
|
||||
logger.warning('Unexpected/unsupported input parameter %s=%s', p, v)
|
||||
output[p] = v
|
||||
else:
|
||||
output[p] = p_class.encode(v)
|
||||
return output
|
||||
|
||||
def decode(self, data: dict) -> dict:
|
||||
def decode_client(self, data: dict) -> dict:
|
||||
"""[further] Decode and validate the JSON-Dict of the response body."""
|
||||
output = {}
|
||||
if 'header' in self.output_params:
|
||||
# let's first do the header, it's special
|
||||
if not 'header' in data:
|
||||
raise ValueError('Mandatory output parameter "header" missing')
|
||||
hdr_class = self.output_params.get('header')
|
||||
output['header'] = hdr_class.decode(data['header'])
|
||||
output_mandatory = self.output_mandatory
|
||||
|
||||
if output['header']['functionExecutionStatus']['status'] not in ['Executed-Success','Executed-WithWarning']:
|
||||
raise ApiError(output['header']['functionExecutionStatus'])
|
||||
# we can only expect mandatory parameters to be present in case of successful execution
|
||||
for p in self.output_mandatory:
|
||||
if p == 'header':
|
||||
continue
|
||||
# In case a provided header (may be optional) indicates that the API function call was unsuccessful, a
|
||||
# different set of mandatory parameters applies.
|
||||
header = data.get('header')
|
||||
if header:
|
||||
if data['header']['functionExecutionStatus']['status'] not in ['Executed-Success','Executed-WithWarning']:
|
||||
output_mandatory = self.output_mandatory_failed
|
||||
|
||||
for p in output_mandatory:
|
||||
if not p in data:
|
||||
raise ValueError('Mandatory output parameter "%s" missing' % p)
|
||||
for p, v in data.items():
|
||||
@@ -231,35 +296,195 @@ class JsonHttpApiFunction(abc.ABC):
|
||||
output[p] = p_class.decode(v)
|
||||
return output
|
||||
|
||||
def encode_server(self, data: dict) -> dict:
|
||||
"""Validate an encode input dict into JSON-serializable dict for response body."""
|
||||
output = {}
|
||||
output_mandatory = self.output_mandatory
|
||||
|
||||
# In case a provided header (may be optional) indicates that the API function call was unsuccessful, a
|
||||
# different set of mandatory parameters applies.
|
||||
header = data.get('header')
|
||||
if header:
|
||||
if data['header']['functionExecutionStatus']['status'] not in ['Executed-Success','Executed-WithWarning']:
|
||||
output_mandatory = self.output_mandatory_failed
|
||||
|
||||
for p in output_mandatory:
|
||||
if not p in data:
|
||||
raise ValueError('Mandatory output parameter %s missing' % p)
|
||||
for p, v in data.items():
|
||||
p_class = self.output_params.get(p)
|
||||
if not p_class:
|
||||
logger.warning('Unexpected/unsupported output parameter %s=%s', p, v)
|
||||
output[p] = v
|
||||
else:
|
||||
output[p] = p_class.encode(v)
|
||||
return output
|
||||
|
||||
def decode_server(self, data: dict) -> dict:
|
||||
"""[further] Decode and validate the JSON-Dict of the request body."""
|
||||
output = {}
|
||||
|
||||
for p in self.input_mandatory:
|
||||
if not p in data:
|
||||
raise ValueError('Mandatory input parameter "%s" missing' % p)
|
||||
for p, v in data.items():
|
||||
p_class = self.input_params.get(p)
|
||||
if not p_class:
|
||||
logger.warning('Unexpected/unsupported input parameter "%s"="%s"', p, v)
|
||||
output[p] = v
|
||||
else:
|
||||
output[p] = p_class.decode(v)
|
||||
return output
|
||||
|
||||
def rewrite_url(self, data: dict, url: str) -> Tuple[dict, str]:
|
||||
"""
|
||||
Rewrite a static URL using information passed in the data dict. This method may be overloaded by a derived
|
||||
class to allow fully dynamic URLs. The input parameters required for the URL rewriting may be passed using
|
||||
data parameter. In case those parameters are additional parameters that are not intended to be passed to
|
||||
the encode_client method later, they must be removed explcitly.
|
||||
|
||||
Args:
|
||||
data: (see JsonHttpApiClient and JsonHttpApiServer)
|
||||
url: statically generated URL string (see comment in JsonHttpApiClient)
|
||||
"""
|
||||
|
||||
# This implementation is a placeholder in which we do not perform any URL rewriting. We just pass through data
|
||||
# and url unmodified.
|
||||
return data, url
|
||||
|
||||
class JsonHttpApiClient():
|
||||
def __init__(self, api_func: JsonHttpApiFunction, url_prefix: str, func_req_id: Optional[str],
|
||||
session: requests.Session):
|
||||
"""
|
||||
Args:
|
||||
api_func : API function definition (JsonHttpApiFunction)
|
||||
url_prefix : prefix to be put in front of the API function path (see JsonHttpApiFunction)
|
||||
func_req_id : function requestor id to use for requests
|
||||
session : session object (requests)
|
||||
"""
|
||||
self.api_func = api_func
|
||||
self.url_prefix = url_prefix
|
||||
self.func_req_id = func_req_id
|
||||
self.session = session
|
||||
|
||||
def call(self, data: dict, func_call_id: Optional[str] = None, timeout=10) -> Optional[dict]:
|
||||
"""Make an API call to the HTTP API endpoint represented by this object.
|
||||
Input data is passed in `data` as json-serializable dict. Output data
|
||||
is returned as json-deserialized dict."""
|
||||
url = self.url_prefix + self.path
|
||||
encoded = json.dumps(self.encode(data, func_call_id))
|
||||
"""
|
||||
Make an API call to the HTTP API endpoint represented by this object. Input data is passed in `data` as
|
||||
json-serializable fields. `data` may also contain additional parameters required for URL rewriting (see
|
||||
rewrite_url in class JsonHttpApiFunction). Output data is returned as json-deserialized dict.
|
||||
|
||||
Args:
|
||||
data: Input data required to perform the request.
|
||||
func_call_id: Function Call Identifier, if present a header field is generated automatically.
|
||||
timeout: Maximum amount of time to wait for the request to complete.
|
||||
"""
|
||||
|
||||
# In case a function caller ID is supplied, use it together with the stored function requestor ID to generate
|
||||
# and prepend the header field according to SGP.22, section 6.5.1.1 and 6.5.1.3. (the presence of the header
|
||||
# field is checked by the encode_client method)
|
||||
if func_call_id:
|
||||
data = {'header' : {'functionRequesterIdentifier': self.func_req_id,
|
||||
'functionCallIdentifier': func_call_id}} | data
|
||||
|
||||
# The URL used for the HTTP request (see below) normally consists of the initially given url_prefix
|
||||
# concatenated with the path defined by the JsonHttpApiFunction definition. This static URL path may be
|
||||
# rewritten by rewrite_url method defined in the JsonHttpApiFunction.
|
||||
data, url = self.api_func.rewrite_url(data, self.url_prefix + self.api_func.path)
|
||||
|
||||
# Encode the message (the presence of mandatory fields is checked during encoding)
|
||||
encoded = json.dumps(self.api_func.encode_client(data))
|
||||
|
||||
# Apply HTTP request headers according to SGP.22, section 6.5.1
|
||||
req_headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'X-Admin-Protocol': 'gsma/rsp/v2.5.0',
|
||||
}
|
||||
req_headers.update(self.extra_http_req_headers)
|
||||
req_headers.update(self.api_func.extra_http_req_headers)
|
||||
|
||||
# Perform HTTP request
|
||||
logger.debug("HTTP REQ %s - hdr: %s '%s'" % (url, req_headers, encoded))
|
||||
response = self.session.request(self.http_method, url, data=encoded, headers=req_headers, timeout=timeout)
|
||||
response = self.session.request(self.api_func.http_method, url, data=encoded, headers=req_headers, timeout=timeout)
|
||||
logger.debug("HTTP RSP-STS: [%u] hdr: %s" % (response.status_code, response.headers))
|
||||
logger.debug("HTTP RSP: %s" % (response.content))
|
||||
|
||||
if response.status_code != self.expected_http_status:
|
||||
# Check HTTP response status code and make sure that the returned HTTP headers look plausible (according to
|
||||
# SGP.22, section 6.5.1)
|
||||
if response.status_code != self.api_func.expected_http_status:
|
||||
raise HttpStatusError(response)
|
||||
if not response.headers.get('Content-Type').startswith(req_headers['Content-Type']):
|
||||
if response.content and not response.headers.get('Content-Type').startswith(req_headers['Content-Type']):
|
||||
raise HttpHeaderError(response)
|
||||
if not response.headers.get('X-Admin-Protocol', 'gsma/rsp/v2.unknown').startswith('gsma/rsp/v2.'):
|
||||
raise HttpHeaderError(response)
|
||||
|
||||
# Decode response and return the result back to the caller
|
||||
if response.content:
|
||||
if response.headers.get('Content-Type').startswith('application/json'):
|
||||
return self.decode(response.json())
|
||||
elif response.headers.get('Content-Type').startswith('text/plain;charset=UTF-8'):
|
||||
return { 'data': response.content.decode('utf-8') }
|
||||
raise HttpHeaderError(f'unimplemented response Content-Type: {response.headers=!r}')
|
||||
|
||||
output = self.api_func.decode_client(response.json())
|
||||
# In case the response contains a header, check it to make sure that the API call was executed successfully
|
||||
# (the presence of the header field is checked by the decode_client method)
|
||||
if 'header' in output:
|
||||
if output['header']['functionExecutionStatus']['status'] not in ['Executed-Success','Executed-WithWarning']:
|
||||
raise ApiError(output['header']['functionExecutionStatus'])
|
||||
return output
|
||||
return None
|
||||
|
||||
class JsonHttpApiServer():
|
||||
def __init__(self, api_func: JsonHttpApiFunction, call_handler = None):
|
||||
"""
|
||||
Args:
|
||||
api_func : API function definition (JsonHttpApiFunction)
|
||||
call_handler : handler function to process the request. This function must accept the
|
||||
decoded request as a dictionary. The handler function must return a tuple consisting
|
||||
of the response in the form of a dictionary (may be empty), and a function execution
|
||||
status string ('Executed-Success', 'Executed-WithWarning', 'Failed' or 'Expired')
|
||||
"""
|
||||
self.api_func = api_func
|
||||
if call_handler:
|
||||
self.call_handler = call_handler
|
||||
else:
|
||||
self.call_handler = self.default_handler
|
||||
|
||||
def default_handler(self, data: dict) -> (dict, str):
|
||||
"""default handler, used in case no call handler is provided."""
|
||||
logger.error("no handler function for request: %s" % str(data))
|
||||
return {}, 'Failed'
|
||||
|
||||
def call(self, request: Request) -> str:
|
||||
""" Process an incoming request.
|
||||
Args:
|
||||
request : request object as received using twisted.web.server
|
||||
Returns:
|
||||
encoded JSON string (HTTP response code and headers are set by calling the appropriate methods on the
|
||||
provided the request object)
|
||||
"""
|
||||
|
||||
# Make sure the request is done with the correct HTTP method
|
||||
if (request.method.decode() != self.api_func.http_method):
|
||||
raise ValueError('Wrong HTTP method %s!=%s' % (request.method.decode(), self.api_func.http_method))
|
||||
|
||||
# Decode the request
|
||||
decoded_request = self.api_func.decode_server(json.loads(request.content.read()))
|
||||
|
||||
# Run call handler (see above)
|
||||
data, fe_status = self.call_handler(decoded_request)
|
||||
|
||||
# In case a function execution status is returned, use it to generate and prepend the header field according to
|
||||
# SGP.22, section 6.5.1.2 and 6.5.1.4 (the presence of the header filed is checked by the encode_server method)
|
||||
if fe_status:
|
||||
data = {'header' : {'functionExecutionStatus': {'status' : fe_status}}} | data
|
||||
|
||||
# Encode the message (the presence of mandatory fields is checked during encoding)
|
||||
encoded = json.dumps(self.api_func.encode_server(data))
|
||||
|
||||
# Apply HTTP request headers according to SGP.22, section 6.5.1
|
||||
res_headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'X-Admin-Protocol': 'gsma/rsp/v2.5.0',
|
||||
}
|
||||
res_headers.update(self.api_func.extra_http_res_headers)
|
||||
for header, value in res_headers.items():
|
||||
request.setHeader(header, value)
|
||||
request.setResponseCode(self.api_func.expected_http_status)
|
||||
|
||||
# Return the encoded result back to the caller for sending (using twisted/klein)
|
||||
return encoded
|
||||
|
||||
|
||||
@@ -19,20 +19,10 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import copy
|
||||
import pprint
|
||||
import logging
|
||||
import traceback
|
||||
import inspect
|
||||
from typing import List, Generator
|
||||
from typing import Generator
|
||||
from pySim.esim.saip.personalization import ConfigurableParameter
|
||||
from pySim.esim.saip import param_source
|
||||
from pySim.esim.saip import ProfileElementSequence, ProfileElementSD
|
||||
from pySim.global_platform import KeyUsageQualifier
|
||||
from osmocom.utils import b2h
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
def _func_():
|
||||
return inspect.currentframe().f_back.f_code.co_name
|
||||
from pySim.esim.saip import ProfileElementSequence
|
||||
|
||||
class BatchPersonalization:
|
||||
"""Produce a series of eSIM profiles from predefined parameters.
|
||||
@@ -40,9 +30,9 @@ class BatchPersonalization:
|
||||
|
||||
Usage example:
|
||||
|
||||
der_input = some_file.open('rb').read()
|
||||
der_input = open('some_file', 'rb').read()
|
||||
pes = ProfileElementSequence.from_der(der_input)
|
||||
p = pers.BatchPersonalization(
|
||||
p = BatchPersonalization(
|
||||
n=10,
|
||||
src_pes=pes,
|
||||
csv_rows=get_csv_reader())
|
||||
@@ -64,9 +54,12 @@ class BatchPersonalization:
|
||||
"""
|
||||
|
||||
class ParamAndSrc:
|
||||
'tie a ConfigurableParameter to a source of actual values'
|
||||
"""tie a ConfigurableParameter to a source of actual values"""
|
||||
def __init__(self, param: ConfigurableParameter, src: param_source.ParamSource):
|
||||
self.param = param
|
||||
if isinstance(param, type):
|
||||
self.param_cls = param
|
||||
else:
|
||||
self.param_cls = param.__class__
|
||||
self.src = src
|
||||
|
||||
def __init__(self,
|
||||
@@ -81,10 +74,10 @@ class BatchPersonalization:
|
||||
copied.
|
||||
params: list of ParamAndSrc instances, defining a ConfigurableParameter and corresponding ParamSource to fill in
|
||||
profile values.
|
||||
csv_rows: A list or generator producing all CSV rows one at a time, starting with a row containing the column
|
||||
headers. This is compatible with the python csv.reader. Each row gets passed to
|
||||
ParamSource.get_next(), such that ParamSource implementations can access the row items.
|
||||
See param_source.CsvSource.
|
||||
csv_rows: A generator (e.g. iter(list_of_rows)) producing all CSV rows one at a time, starting with a row
|
||||
containing the column headers. This is compatible with the python csv.reader. Each row gets passed to
|
||||
ParamSource.get_next(), such that ParamSource implementations can access the row items. See
|
||||
param_source.CsvSource.
|
||||
"""
|
||||
self.n = n
|
||||
self.params = params or []
|
||||
@@ -92,7 +85,7 @@ class BatchPersonalization:
|
||||
self.csv_rows = csv_rows
|
||||
|
||||
def add_param_and_src(self, param:ConfigurableParameter, src:param_source.ParamSource):
|
||||
self.params.append(BatchPersonalization.ParamAndSrc(param=param, src=src))
|
||||
self.params.append(BatchPersonalization.ParamAndSrc(param, src))
|
||||
|
||||
def generate_profiles(self):
|
||||
# get first row of CSV: column names
|
||||
@@ -119,248 +112,9 @@ class BatchPersonalization:
|
||||
try:
|
||||
input_value = p.src.get_next(csv_row=csv_row)
|
||||
assert input_value is not None
|
||||
value = p.param.__class__.validate_val(input_value)
|
||||
p.param.__class__.apply_val(pes, value)
|
||||
value = p.param_cls.validate_val(input_value)
|
||||
p.param_cls.apply_val(pes, value)
|
||||
except Exception as e:
|
||||
print(traceback.format_exc())
|
||||
logger.error('during %s: %r', _func_(), e)
|
||||
raise ValueError(f'{p.param.name} fed by {p.src.name}: {e!r}') from e
|
||||
raise ValueError(f'{p.param_cls.get_name()} fed by {p.src.name}: {e}') from e
|
||||
|
||||
yield pes
|
||||
|
||||
|
||||
class UppAudit(dict):
|
||||
"""
|
||||
Key-value pairs collected from a single UPP DER or PES.
|
||||
|
||||
UppAudit itself is a dict, callers may use the standard python dict API to access key-value pairs read from the UPP.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def from_der(cls, der: bytes, params: List, der_size=False, additional_sd_keys=False):
|
||||
'''return a dict of parameter name and set of selected parameter values found in a DER encoded profile. Note:
|
||||
some ConfigurableParameter implementations return more than one key-value pair, for example, Imsi returns
|
||||
both 'IMSI' and 'IMSI-ACC' parameters.
|
||||
|
||||
e.g.
|
||||
UppAudit.from_der(my_der, [Imsi, ])
|
||||
--> {'IMSI': '001010000000023', 'IMSI-ACC': '5'}
|
||||
|
||||
(where 'IMSI' == Imsi.name)
|
||||
|
||||
Read all parameters listed in params. params is a list of either ConfigurableParameter classes or
|
||||
ConfigurableParameter class instances. This calls only classmethods, so each entry in params can either be the
|
||||
class itself, or a class-instance of, a (non-abstract) ConfigurableParameter subclass.
|
||||
For example, params = [Imsi, ] is equivalent to params = [Imsi(), ].
|
||||
|
||||
For der_size=True, also include a {'der_size':12345} entry.
|
||||
|
||||
For additional_sd_keys=True, output also all Security Domain KVN that there are *no* ConfigurableParameter
|
||||
subclasses for. For example, SCP80 has reserved kvn 0x01..0x0f, but we offer only Scp80Kvn01, Scp80Kvn02,
|
||||
Scp80Kvn03. So we would not show kvn 0x04..0x0f in an audit. additional_sd_keys=True includes audits of all SD
|
||||
key KVN there may be in the UPP. This helps to spot SD keys that may already be present in a UPP template, with
|
||||
unexpected / unusual kvn.
|
||||
'''
|
||||
|
||||
# make an instance of this class
|
||||
upp_audit = cls()
|
||||
|
||||
if der_size:
|
||||
upp_audit['der_size'] = set((len(der), ))
|
||||
|
||||
pes = ProfileElementSequence.from_der(der)
|
||||
for param in params:
|
||||
try:
|
||||
for valdict in param.get_values_from_pes(pes):
|
||||
upp_audit.add_values(valdict)
|
||||
except Exception as e:
|
||||
raise ValueError(f'Error during audit for parameter {param}: {e}') from e
|
||||
|
||||
if not additional_sd_keys:
|
||||
return upp_audit
|
||||
|
||||
# additional_sd_keys
|
||||
for pe in pes.pe_list:
|
||||
if pe.type != 'securityDomain':
|
||||
continue
|
||||
assert isinstance(pe, ProfileElementSD)
|
||||
|
||||
for key in pe.keys:
|
||||
audit_key = f'SdKey_KVN{key.key_version_number:02x}_ID{key.key_identifier:02x}'
|
||||
kuq_bin = KeyUsageQualifier.build(key.key_usage_qualifier).hex()
|
||||
audit_val = f'{key.key_components=!r} key_usage_qualifier=0x{kuq_bin}={key.key_usage_qualifier!r}'
|
||||
upp_audit[audit_key] = set((audit_val, ))
|
||||
|
||||
return upp_audit
|
||||
|
||||
def get_single_val(self, key, validate=True, allow_absent=False, absent_val=None):
|
||||
"""
|
||||
Return the audit's value for the given audit key (like 'IMSI' or 'IMSI-ACC').
|
||||
Any kind of value may occur multiple times in a profile. When all of these agree to the same unambiguous value,
|
||||
return that value. When they do not agree, raise a ValueError.
|
||||
"""
|
||||
# key should be a string, but if someone passes a ConfigurableParameter, just use its default name
|
||||
if ConfigurableParameter.is_super_of(key):
|
||||
key = key.get_name()
|
||||
|
||||
assert isinstance(key, str)
|
||||
v = self.get(key)
|
||||
if v is None and allow_absent:
|
||||
return absent_val
|
||||
if not isinstance(v, set):
|
||||
raise ValueError(f'audit value should be a set(), got {v!r}')
|
||||
if len(v) != 1:
|
||||
raise ValueError(f'expected a single value for {key}, got {v!r}')
|
||||
v = tuple(v)[0]
|
||||
return v
|
||||
|
||||
@staticmethod
|
||||
def audit_val_to_str(v):
|
||||
"""
|
||||
Usually, we want to see a single value in an audit. Still, to be able to collect multiple ambiguous values,
|
||||
audit values are always python sets. Turn it into a nice string representation: only the value when it is
|
||||
unambiguous, otherwise a list of the ambiguous values.
|
||||
A value may also be completely absent, then return 'not present'.
|
||||
"""
|
||||
def try_single_val(w):
|
||||
'change single-entry sets to just the single value'
|
||||
if isinstance(w, set):
|
||||
if len(w) == 1:
|
||||
return tuple(w)[0]
|
||||
if len(w) == 0:
|
||||
return None
|
||||
return w
|
||||
|
||||
v = try_single_val(v)
|
||||
if isinstance(v, bytes):
|
||||
v = bytes_to_hexstr(v)
|
||||
if v is None:
|
||||
return 'not present'
|
||||
return str(v)
|
||||
|
||||
def get_val_str(self, key):
|
||||
"""Return a string of the value stored for the given key"""
|
||||
return UppAudit.audit_val_to_str(self.get(key))
|
||||
|
||||
def add_values(self, src:dict):
|
||||
"""self and src are both a dict of sets.
|
||||
For example from
|
||||
self == { 'a': set((123,)) }
|
||||
and
|
||||
src == { 'a': set((456,)), 'b': set((789,)) }
|
||||
then after this function call:
|
||||
self == { 'a': set((123, 456,)), 'b': set((789,)) }
|
||||
"""
|
||||
assert isinstance(src, dict)
|
||||
for key, srcvalset in src.items():
|
||||
dstvalset = self.get(key)
|
||||
if dstvalset is None:
|
||||
dstvalset = set()
|
||||
self[key] = dstvalset
|
||||
dstvalset.add(srcvalset)
|
||||
|
||||
def __str__(self):
|
||||
return '\n'.join(f'{key}: {self.get_val_str(key)}' for key in sorted(self.keys()))
|
||||
|
||||
class BatchAudit(list):
|
||||
"""
|
||||
Collect UppAudit instances for a batch of UPP, for example from a personalization.BatchPersonalization.
|
||||
Produce an output CSV.
|
||||
|
||||
Usage example:
|
||||
|
||||
ba = BatchAudit(params=(personalization.Iccid, ))
|
||||
for upp_der in upps:
|
||||
ba.add_audit(upp_der)
|
||||
print(ba.summarize())
|
||||
|
||||
with open('output.csv', 'wb') as csv_data:
|
||||
csv_str = io.TextIOWrapper(csv_data, 'utf-8', newline='')
|
||||
csv.writer(csv_str).writerows( ba.to_csv_rows() )
|
||||
csv_str.flush()
|
||||
|
||||
BatchAudit itself is a list, callers may use the standard python list API to access the UppAudit instances.
|
||||
"""
|
||||
|
||||
def __init__(self, params:List):
|
||||
assert params
|
||||
self.params = params
|
||||
|
||||
def add_audit(self, upp_der:bytes):
|
||||
audit = UppAudit.from_der(upp_der, self.params)
|
||||
self.append(audit)
|
||||
return audit
|
||||
|
||||
def summarize(self):
|
||||
batch_audit = UppAudit()
|
||||
|
||||
audits = self
|
||||
|
||||
if len(audits) > 2:
|
||||
val_sep = ', ..., '
|
||||
else:
|
||||
val_sep = ', '
|
||||
|
||||
first_audit = None
|
||||
last_audit = None
|
||||
if len(audits) >= 1:
|
||||
first_audit = audits[0]
|
||||
if len(audits) >= 2:
|
||||
last_audit = audits[-1]
|
||||
|
||||
if first_audit:
|
||||
if last_audit:
|
||||
for key in first_audit.keys():
|
||||
first_val = first_audit.get_val_str(key)
|
||||
last_val = last_audit.get_val_str(key)
|
||||
|
||||
if first_val == last_val:
|
||||
val = first_val
|
||||
else:
|
||||
val_sep_with_newline = f"{val_sep.rstrip()}\n{' ' * (len(key) + 2)}"
|
||||
val = val_sep_with_newline.join((first_val, last_val))
|
||||
batch_audit[key] = val
|
||||
else:
|
||||
batch_audit.update(first_audit)
|
||||
|
||||
return batch_audit
|
||||
|
||||
def to_csv_rows(self, headers=True, sort_key=None):
|
||||
'''generator that yields all audits' values as rows, useful feed to a csv.writer.'''
|
||||
columns = set()
|
||||
for audit in self:
|
||||
columns.update(audit.keys())
|
||||
|
||||
columns = tuple(sorted(columns, key=sort_key))
|
||||
|
||||
if headers:
|
||||
yield columns
|
||||
|
||||
for audit in self:
|
||||
yield (audit.get_single_val(col, allow_absent=True, absent_val="") for col in columns)
|
||||
|
||||
def bytes_to_hexstr(b:bytes, sep=''):
|
||||
return sep.join(f'{x:02x}' for x in b)
|
||||
|
||||
def esim_profile_introspect(upp):
|
||||
pes = ProfileElementSequence.from_der(upp.read())
|
||||
d = {}
|
||||
d['upp'] = repr(pes)
|
||||
|
||||
def show_bytes_as_hexdump(item):
|
||||
if isinstance(item, bytes):
|
||||
return bytes_to_hexstr(item)
|
||||
if isinstance(item, list):
|
||||
return list(show_bytes_as_hexdump(i) for i in item)
|
||||
if isinstance(item, tuple):
|
||||
return tuple(show_bytes_as_hexdump(i) for i in item)
|
||||
if isinstance(item, dict):
|
||||
d = {}
|
||||
for k, v in item.items():
|
||||
d[k] = show_bytes_as_hexdump(v)
|
||||
return d
|
||||
return item
|
||||
|
||||
l = list((pe.type, show_bytes_as_hexdump(pe.decoded)) for pe in pes)
|
||||
d['pp'] = pprint.pformat(l, width=120)
|
||||
return d
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import secrets
|
||||
import random
|
||||
import re
|
||||
from osmocom.utils import b2h
|
||||
|
||||
@@ -37,13 +37,10 @@ class ParamSource:
|
||||
name = "none"
|
||||
numeric_base = None # or 10 or 16
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, s:str):
|
||||
"""Subclasses implement this:
|
||||
if a parameter source defines some string input magic, override this function.
|
||||
For example, a RandomDigitSource derives the number of digits from the string length,
|
||||
so the user can enter '0000' to get a four digit random number."""
|
||||
return cls(s)
|
||||
def __init__(self, input_str:str):
|
||||
"""Subclasses should call super().__init__(input_str) before evaluating self.input_str. Each subclass __init__()
|
||||
may in turn manipulate self.input_str to apply expansions or decodings."""
|
||||
self.input_str = input_str
|
||||
|
||||
def get_next(self, csv_row:dict=None):
|
||||
"""Subclasses implement this: return the next value from the parameter source.
|
||||
@@ -51,108 +48,99 @@ class ParamSource:
|
||||
This default implementation is an empty source."""
|
||||
raise ParamSourceExhaustedExn()
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, input_str:str):
|
||||
"""compatibility with earlier version of ParamSource. Just use the constructor."""
|
||||
return cls(input_str)
|
||||
|
||||
class ConstantSource(ParamSource):
|
||||
"""one value for all"""
|
||||
name = "constant"
|
||||
|
||||
def __init__(self, val:str):
|
||||
self.val = val
|
||||
|
||||
def get_next(self, csv_row:dict=None):
|
||||
return self.val
|
||||
return self.input_str
|
||||
|
||||
class InputExpandingParamSource(ParamSource):
|
||||
|
||||
def __init__(self, input_str:str):
|
||||
super().__init__(input_str)
|
||||
self.input_str = self.expand_input_str(self.input_str)
|
||||
|
||||
@classmethod
|
||||
def expand_str(cls, s:str):
|
||||
def expand_input_str(cls, input_str:str):
|
||||
# user convenience syntax '0*32' becomes '00000000000000000000000000000000'
|
||||
if "*" not in s:
|
||||
return s
|
||||
tokens = re.split(r"([^ \t]+)[ \t]*\*[ \t]*([0-9]+)", s)
|
||||
if "*" not in input_str:
|
||||
return input_str
|
||||
# re: "XX * 123" with optional spaces
|
||||
tokens = re.split(r"([^ \t]+)[ \t]*\*[ \t]*([0-9]+)", input_str)
|
||||
if len(tokens) < 3:
|
||||
return s
|
||||
return input_str
|
||||
parts = []
|
||||
for unchanged, snippet, repeat_str in zip(tokens[0::3], tokens[1::3], tokens[2::3]):
|
||||
parts.append(unchanged)
|
||||
repeat = int(repeat_str)
|
||||
parts.append(snippet * repeat)
|
||||
return "".join(parts)
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, s:str):
|
||||
return cls(cls.expand_str(s))
|
||||
return "".join(parts)
|
||||
|
||||
class DecimalRangeSource(InputExpandingParamSource):
|
||||
"""abstract: decimal numbers with a value range"""
|
||||
|
||||
numeric_base = 10
|
||||
|
||||
def __init__(self, num_digits, first_value, last_value):
|
||||
"""
|
||||
See also from_str().
|
||||
def __init__(self, input_str:str=None, num_digits:int=None, first_value:int=None, last_value:int=None):
|
||||
"""Constructor to set up values from a (user entered) string: DecimalRangeSource(input_str).
|
||||
Constructor to set up values directly: DecimalRangeSource(num_digits=3, first_value=123, last_value=456)
|
||||
|
||||
All arguments are integer values, and are converted to int if necessary, so a string of an integer is fine.
|
||||
num_digits: fixed number of digits (possibly with leading zeros) to generate.
|
||||
first_value, last_value: the decimal range in which to provide digits.
|
||||
num_digits produces leading zeros when first_value..last_value are shorter.
|
||||
"""
|
||||
num_digits = int(num_digits)
|
||||
first_value = int(first_value)
|
||||
last_value = int(last_value)
|
||||
assert ((input_str is not None and (num_digits, first_value, last_value) == (None, None, None))
|
||||
or (input_str is None and None not in (num_digits, first_value, last_value)))
|
||||
|
||||
if input_str is not None:
|
||||
super().__init__(input_str)
|
||||
|
||||
input_str = self.input_str
|
||||
|
||||
if ".." in input_str:
|
||||
first_str, last_str = input_str.split('..')
|
||||
first_str = first_str.strip()
|
||||
last_str = last_str.strip()
|
||||
else:
|
||||
first_str = input_str.strip()
|
||||
last_str = None
|
||||
|
||||
num_digits = len(first_str)
|
||||
first_value = int(first_str)
|
||||
last_value = int(last_str if last_str is not None else "9" * num_digits)
|
||||
|
||||
assert num_digits > 0
|
||||
assert first_value <= last_value
|
||||
self.num_digits = num_digits
|
||||
self.val_first_last = (first_value, last_value)
|
||||
self.first_value = first_value
|
||||
self.last_value = last_value
|
||||
|
||||
def val_to_digit(self, val:int):
|
||||
return "%0*d" % (self.num_digits, val) # pylint: disable=consider-using-f-string
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, s:str):
|
||||
s = cls.expand_str(s)
|
||||
|
||||
if ".." in s:
|
||||
first_str, last_str = s.split('..')
|
||||
first_str = first_str.strip()
|
||||
last_str = last_str.strip()
|
||||
else:
|
||||
first_str = s.strip()
|
||||
last_str = None
|
||||
|
||||
first_value = int(first_str)
|
||||
last_value = int(last_str) if last_str is not None else "9" * len(first_str)
|
||||
return cls(num_digits=len(first_str), first_value=first_value, last_value=last_value)
|
||||
|
||||
class RandomSourceMixin:
|
||||
random_impl = secrets.SystemRandom()
|
||||
|
||||
class RandomDigitSource(DecimalRangeSource, RandomSourceMixin):
|
||||
class RandomDigitSource(DecimalRangeSource):
|
||||
"""return a different sequence of random decimal digits each"""
|
||||
name = "random decimal digits"
|
||||
used_keys = set()
|
||||
|
||||
def get_next(self, csv_row:dict=None):
|
||||
# try to generate random digits that are always different from previously produced random bytes
|
||||
attempts = 10
|
||||
while True:
|
||||
val = self.random_impl.randint(*self.val_first_last)
|
||||
if val in RandomDigitSource.used_keys:
|
||||
attempts -= 1
|
||||
if attempts:
|
||||
continue
|
||||
RandomDigitSource.used_keys.add(val)
|
||||
break
|
||||
val = random.randint(self.first_value, self.last_value) # TODO secure random source?
|
||||
return self.val_to_digit(val)
|
||||
|
||||
class RandomHexDigitSource(InputExpandingParamSource, RandomSourceMixin):
|
||||
class RandomHexDigitSource(InputExpandingParamSource):
|
||||
"""return a different sequence of random hexadecimal digits each"""
|
||||
name = "random hexadecimal digits"
|
||||
numeric_base = 16
|
||||
used_keys = set()
|
||||
|
||||
def __init__(self, num_digits):
|
||||
"""see from_str()"""
|
||||
num_digits = int(num_digits)
|
||||
def __init__(self, input_str:str):
|
||||
super().__init__(input_str)
|
||||
input_str = self.input_str
|
||||
|
||||
num_digits = len(input_str.strip())
|
||||
if num_digits < 1:
|
||||
raise ValueError("zero number of digits")
|
||||
# hex digits always come in two
|
||||
@@ -161,36 +149,24 @@ class RandomHexDigitSource(InputExpandingParamSource, RandomSourceMixin):
|
||||
self.num_digits = num_digits
|
||||
|
||||
def get_next(self, csv_row:dict=None):
|
||||
# try to generate random bytes that are always different from previously produced random bytes
|
||||
attempts = 10
|
||||
while True:
|
||||
val = self.random_impl.randbytes(self.num_digits // 2)
|
||||
if val in RandomHexDigitSource.used_keys:
|
||||
attempts -= 1
|
||||
if attempts:
|
||||
continue
|
||||
RandomHexDigitSource.used_keys.add(val)
|
||||
break
|
||||
|
||||
val = random.randbytes(self.num_digits // 2) # TODO secure random source?
|
||||
return b2h(val)
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, s:str):
|
||||
s = cls.expand_str(s)
|
||||
return cls(num_digits=len(s.strip()))
|
||||
|
||||
class IncDigitSource(DecimalRangeSource):
|
||||
"""incrementing sequence of digits"""
|
||||
name = "incrementing decimal digits"
|
||||
|
||||
def __init__(self, num_digits, first_value, last_value):
|
||||
super().__init__(num_digits, first_value, last_value)
|
||||
def __init__(self, input_str:str=None, num_digits:int=None, first_value:int=None, last_value:int=None):
|
||||
"""input_str: the range of values to iterate. Format: 'FIRST..LAST' (e.g. '0001..9999') or
|
||||
just 'FIRST' (iterates to the maximum value for the given digit width). Leading zeros in
|
||||
FIRST determine the digit width and are preserved in returned values."""
|
||||
super().__init__(input_str, num_digits, first_value, last_value)
|
||||
self.next_val = None
|
||||
self.reset()
|
||||
|
||||
def reset(self):
|
||||
"""Restart from the first value of the defined range passed to __init__()."""
|
||||
self.next_val = self.val_first_last[0]
|
||||
self.next_val = self.first_value
|
||||
|
||||
def get_next(self, csv_row:dict=None):
|
||||
val = self.next_val
|
||||
@@ -200,7 +176,7 @@ class IncDigitSource(DecimalRangeSource):
|
||||
returnval = self.val_to_digit(val)
|
||||
|
||||
val += 1
|
||||
if val > self.val_first_last[1]:
|
||||
if val > self.last_value:
|
||||
self.next_val = None
|
||||
else:
|
||||
self.next_val = val
|
||||
@@ -211,18 +187,17 @@ class CsvSource(ParamSource):
|
||||
"""apply a column from a CSV row, as passed in to ParamSource.get_next(csv_row)"""
|
||||
name = "from CSV"
|
||||
|
||||
def __init__(self, csv_column):
|
||||
"""
|
||||
csv_column: column name indicating the column to use for this parameter.
|
||||
This name is used in get_next(): the caller passes the current CSV row to get_next(), from which
|
||||
CsvSource picks the column with the name matching csv_column.
|
||||
"""
|
||||
self.csv_column = csv_column
|
||||
def __init__(self, input_str:str):
|
||||
"""input_str: the CSV column name to read values from.
|
||||
The caller passes the current CSV row to get_next(), from which CsvSource picks the column matching
|
||||
this name."""
|
||||
super().__init__(input_str)
|
||||
self.csv_column = self.input_str
|
||||
|
||||
def get_next(self, csv_row:dict=None):
|
||||
val = None
|
||||
if csv_row:
|
||||
val = csv_row.get(self.csv_column)
|
||||
if not val:
|
||||
if val is None:
|
||||
raise ParamSourceUndefinedExn(f"no value for CSV column {self.csv_column!r}")
|
||||
return val
|
||||
|
||||
@@ -16,18 +16,14 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import abc
|
||||
import enum
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import pprint
|
||||
import json
|
||||
from typing import List, Tuple, Generator, Optional
|
||||
|
||||
from construct.core import StreamError
|
||||
from osmocom.tlv import camel_to_snake
|
||||
from osmocom.utils import hexstr
|
||||
from pySim.utils import enc_iccid, dec_iccid, enc_imsi, dec_imsi, h2b, b2h, rpad, sanitize_iccid
|
||||
from pySim.ts_31_102 import EF_AD, EF_UST, EF_Routing_Indicator, EF_SUCI_Calc_Info
|
||||
from pySim.ts_51_011 import EF_SMSP
|
||||
from pySim.esim.saip import param_source
|
||||
from pySim.esim.saip import ProfileElement, ProfileElementSD, ProfileElementSequence
|
||||
@@ -56,6 +52,7 @@ class ClassVarMeta(abc.ABCMeta):
|
||||
x = super().__new__(metacls, name, bases, namespace)
|
||||
for k, v in kwargs.items():
|
||||
setattr(x, k, v)
|
||||
setattr(x, 'name', camel_to_snake(name))
|
||||
return x
|
||||
|
||||
class ConfigurableParameter(abc.ABC, metaclass=ClassVarMeta):
|
||||
@@ -75,7 +72,6 @@ class ConfigurableParameter(abc.ABC, metaclass=ClassVarMeta):
|
||||
min_len: minimum length of an input str; min_len = 4
|
||||
max_len: maximum length of an input str; max_len = 8
|
||||
allow_len: permit only specific lengths; allow_len = (8, 16, 32)
|
||||
numeric_base: indicate hex / decimal, if any; numeric_base = None; numeric_base = 10; numeric_base = 16
|
||||
|
||||
Subclasses may change the meaning of these by overriding validate_val(), for example that the length counts
|
||||
resulting bytes instead of a hexstring length. Most subclasses will be covered by the default validate_val().
|
||||
@@ -131,7 +127,6 @@ class ConfigurableParameter(abc.ABC, metaclass=ClassVarMeta):
|
||||
allow_len = None # a list of specific lengths
|
||||
example_input = None
|
||||
default_source = None # a param_source.ParamSource subclass
|
||||
numeric_base = None # or 10 or 16
|
||||
|
||||
def __init__(self, input_value=None):
|
||||
self.input_value = input_value # the raw input value as given by caller
|
||||
@@ -193,28 +188,19 @@ class ConfigurableParameter(abc.ABC, metaclass=ClassVarMeta):
|
||||
if cls.allow_chars is not None:
|
||||
if any(c not in cls.allow_chars for c in val):
|
||||
raise ValueError(f"invalid characters in input value {val!r}, valid chars are {cls.allow_chars}")
|
||||
elif isinstance(val, io.BytesIO):
|
||||
val = val.getvalue()
|
||||
|
||||
if hasattr(val, '__len__'):
|
||||
val_len = len(val)
|
||||
else:
|
||||
# e.g. int length
|
||||
val_len = len(str(val))
|
||||
|
||||
if cls.allow_len is not None:
|
||||
l = cls.allow_len
|
||||
# cls.allow_len could be one int, or a tuple of ints. Wrap a single int also in a tuple.
|
||||
if not isinstance(l, (tuple, list)):
|
||||
l = (l,)
|
||||
if val_len not in l:
|
||||
raise ValueError(f'length must be one of {cls.allow_len}, not {val_len}: {val!r}')
|
||||
if len(val) not in l:
|
||||
raise ValueError(f'length must be one of {cls.allow_len}, not {len(val)}: {val!r}')
|
||||
if cls.min_len is not None:
|
||||
if val_len < cls.min_len:
|
||||
raise ValueError(f'length must be at least {cls.min_len}, not {val_len}: {val!r}')
|
||||
if len(val) < cls.min_len:
|
||||
raise ValueError(f'length must be at least {cls.min_len}, not {len(val)}: {val!r}')
|
||||
if cls.max_len is not None:
|
||||
if val_len > cls.max_len:
|
||||
raise ValueError(f'length must be at most {cls.max_len}, not {val_len}: {val!r}')
|
||||
if len(val) > cls.max_len:
|
||||
raise ValueError(f'length must be at most {cls.max_len}, not {len(val)}: {val!r}')
|
||||
return val
|
||||
|
||||
@classmethod
|
||||
@@ -223,26 +209,6 @@ class ConfigurableParameter(abc.ABC, metaclass=ClassVarMeta):
|
||||
Write the given val in the right format in all the right places in pes."""
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def get_value_from_pes(cls, pes: ProfileElementSequence):
|
||||
"""Same as get_values_from_pes() but expecting a single value.
|
||||
get_values_from_pes() may return values like this:
|
||||
[{ 'AlgorithmID': 'Milenage' }, { 'AlgorithmID': 'Milenage' }]
|
||||
This ensures that all these entries are identical and would return only
|
||||
{ 'AlgorithmID': 'Milenage' }.
|
||||
|
||||
This is relevant for any profile element that may appear multiple times in the same PES (only a few),
|
||||
where each occurrence should reflect the same value (all currently known parameters).
|
||||
"""
|
||||
|
||||
val = None
|
||||
for v in cls.get_values_from_pes(pes):
|
||||
if val is None:
|
||||
val = v
|
||||
elif val != v:
|
||||
raise ValueError(f'get_value_from_pes(): got distinct values: {val!r} != {v!r}')
|
||||
return val
|
||||
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def get_values_from_pes(cls, pes: ProfileElementSequence) -> Generator:
|
||||
@@ -292,16 +258,7 @@ class ConfigurableParameter(abc.ABC, metaclass=ClassVarMeta):
|
||||
May be overridden by subclasses.
|
||||
This default implementation returns the maximum allowed value length -- a good fit for most subclasses.
|
||||
'''
|
||||
l = cls.get_len_range()[1] or 16
|
||||
l = min(10*80, l)
|
||||
return l
|
||||
|
||||
@classmethod
|
||||
def is_super_of(cls, other_class):
|
||||
try:
|
||||
return issubclass(other_class, cls)
|
||||
except TypeError:
|
||||
return False
|
||||
return cls.get_len_range()[1] or 16
|
||||
|
||||
class DecimalParam(ConfigurableParameter):
|
||||
"""Decimal digits. The input value may be a string of decimal digits like '012345', or an int. The output of
|
||||
@@ -309,7 +266,6 @@ class DecimalParam(ConfigurableParameter):
|
||||
"""
|
||||
allow_types = (str, int)
|
||||
allow_chars = '0123456789'
|
||||
numeric_base = 10
|
||||
|
||||
@classmethod
|
||||
def validate_val(cls, val):
|
||||
@@ -333,6 +289,7 @@ class DecimalHexParam(DecimalParam):
|
||||
@classmethod
|
||||
def validate_val(cls, val):
|
||||
val = super().validate_val(val)
|
||||
assert isinstance(val, str)
|
||||
val = ''.join('%02x' % ord(x) for x in val)
|
||||
if cls.rpad is not None:
|
||||
c = cls.rpad_char
|
||||
@@ -342,7 +299,7 @@ class DecimalHexParam(DecimalParam):
|
||||
|
||||
@classmethod
|
||||
def decimal_hex_to_str(cls, val):
|
||||
'useful for get_values_from_pes() implementations of subclasses'
|
||||
"""useful for get_values_from_pes() implementations of subclasses"""
|
||||
if isinstance(val, bytes):
|
||||
val = b2h(val)
|
||||
assert isinstance(val, hexstr)
|
||||
@@ -354,7 +311,6 @@ class DecimalHexParam(DecimalParam):
|
||||
class IntegerParam(ConfigurableParameter):
|
||||
allow_types = (str, int)
|
||||
allow_chars = '0123456789'
|
||||
numeric_base = 10
|
||||
|
||||
# two integers, if the resulting int should be range limited
|
||||
min_val = None
|
||||
@@ -384,19 +340,14 @@ class IntegerParam(ConfigurableParameter):
|
||||
yield valdict
|
||||
|
||||
class BinaryParam(ConfigurableParameter):
|
||||
allow_types = (str, io.BytesIO, bytes, bytearray, int)
|
||||
allow_types = (str, io.BytesIO, bytes, bytearray)
|
||||
allow_chars = '0123456789abcdefABCDEF'
|
||||
strip_chars = ' \t\r\n'
|
||||
numeric_base = 16
|
||||
default_source = param_source.RandomHexDigitSource
|
||||
|
||||
@classmethod
|
||||
def validate_val(cls, val):
|
||||
# take care that min_len and max_len are applied to the binary length by converting to bytes first
|
||||
if isinstance(val, int):
|
||||
min_len, _max_len = cls.get_len_range()
|
||||
val = '%0*d' % (min_len, val)
|
||||
|
||||
if isinstance(val, str):
|
||||
if cls.strip_chars is not None:
|
||||
val = ''.join(c for c in val if c not in cls.strip_chars)
|
||||
@@ -423,67 +374,69 @@ class BinaryParam(ConfigurableParameter):
|
||||
|
||||
|
||||
class EnumParam(ConfigurableParameter):
|
||||
value_map = {
|
||||
# For example:
|
||||
#'Meaningful label for value 23': 0x23,
|
||||
# Where 0x23 is a valid value to use for apply_val().
|
||||
}
|
||||
_value_map_reverse = None
|
||||
"""ConfigurableParameter for named integer enumeration values.
|
||||
|
||||
Subclasses must define a nested enum.IntEnum named 'Values' listing all valid names and their
|
||||
integer codes. apply_val() and get_values_from_pes() are not implemented here and this must
|
||||
be inherited from another mixin."""
|
||||
|
||||
class Values(enum.IntEnum):
|
||||
pass # subclasses override this
|
||||
|
||||
@classmethod
|
||||
def validate_val(cls, val):
|
||||
orig_val = val
|
||||
enum_val = None
|
||||
if isinstance(val, str):
|
||||
enum_name = val
|
||||
enum_val = cls.map_name_to_val(enum_name)
|
||||
def validate_val(cls, val) -> int:
|
||||
if isinstance(val, int):
|
||||
try:
|
||||
return int(cls.Values(val))
|
||||
except ValueError:
|
||||
pass
|
||||
elif isinstance(val, str):
|
||||
member = cls.map_name_to_val(val, strict=False)
|
||||
if member is not None:
|
||||
return member
|
||||
|
||||
# if the str is not one of the known value_map.keys(), is it maybe one of value_map.keys()?
|
||||
if enum_val is None and val in cls.value_map.values():
|
||||
enum_val = val
|
||||
|
||||
if enum_val not in cls.value_map.values():
|
||||
raise ValueError(f"{cls.get_name()}: invalid argument: {orig_val!r}. Valid arguments are:"
|
||||
f" {', '.join(cls.value_map.keys())}")
|
||||
|
||||
return enum_val
|
||||
valid = ', '.join(m.name for m in cls.Values)
|
||||
raise ValueError(f"{cls.get_name()}: invalid argument: {val!r}. Valid arguments are: {valid}")
|
||||
|
||||
@classmethod
|
||||
def map_name_to_val(cls, name:str, strict=True):
|
||||
val = cls.value_map.get(name)
|
||||
if val is not None:
|
||||
return val
|
||||
def map_name_to_val(cls, name: str, strict=True) -> int:
|
||||
"""Return the integer value for a given enum member name. Performs an exact match first,
|
||||
then falls back to fuzzy matching (case-insensitive, punctuation-insensitive)."""
|
||||
try:
|
||||
return int(cls.Values[name])
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
clean_name = cls.clean_name_str(name)
|
||||
for k, v in cls.value_map.items():
|
||||
if clean_name == cls.clean_name_str(k):
|
||||
return v
|
||||
clean = cls.clean_name_str(name)
|
||||
for member in cls.Values:
|
||||
if cls.clean_name_str(member.name) == clean:
|
||||
return int(member)
|
||||
|
||||
if strict:
|
||||
raise ValueError(f"Problem in {cls.get_name()}: {name!r} is not a known value."
|
||||
f" Known values are: {cls.value_map.keys()!r}")
|
||||
valid = ', '.join(m.name for m in cls.Values)
|
||||
raise ValueError(f"{cls.get_name()}: {name!r} is not a known value. Known values are: {valid}")
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def map_val_to_name(cls, val, strict=False) -> str:
|
||||
if cls._value_map_reverse is None:
|
||||
cls._value_map_reverse = dict((v, k) for k, v in cls.value_map.items())
|
||||
|
||||
name = cls._value_map_reverse.get(val)
|
||||
if name:
|
||||
return name
|
||||
if strict:
|
||||
raise ValueError(f"Problem in {cls.get_name()}: {val!r} ({type(val)}) is not a known value."
|
||||
f" Known values are: {cls.value_map.values()!r}")
|
||||
return None
|
||||
"""Return the enum member name for a given integer value."""
|
||||
try:
|
||||
return cls.Values(val).name
|
||||
except ValueError:
|
||||
if strict:
|
||||
raise ValueError(f"{cls.get_name()}: {val!r} ({type(val).__name__}) is not a known value.")
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def name_normalize(cls, name:str) -> str:
|
||||
return cls.map_val_to_name(cls.map_name_to_val(name))
|
||||
def name_normalize(cls, name: str) -> str:
|
||||
"""Map a (possibly fuzzy) name to its canonical enum member name."""
|
||||
return cls.Values(cls.map_name_to_val(name)).name
|
||||
|
||||
@classmethod
|
||||
def clean_name_str(cls, val):
|
||||
return re.sub('[^0-9A-Za-z-_]', '', val).lower()
|
||||
def clean_name_str(cls, val: str) -> str:
|
||||
"""Strip punctuation and case for fuzzy name comparison.
|
||||
Treats hyphens and underscores as equivalent (both removed)."""
|
||||
return re.sub('[^0-9A-Za-z]', '', val).lower()
|
||||
|
||||
|
||||
class Iccid(DecimalParam):
|
||||
@@ -559,7 +512,6 @@ class SmspTpScAddr(ConfigurableParameter):
|
||||
name = 'SMSP-TP-SC-ADDR'
|
||||
allow_chars = '+0123456789'
|
||||
strip_chars = ' \t\r\n'
|
||||
numeric_base = 10
|
||||
max_len = 21 # '+' and 20 digits
|
||||
min_len = 1
|
||||
example_input = '+49301234567'
|
||||
@@ -621,129 +573,44 @@ class SmspTpScAddr(ConfigurableParameter):
|
||||
ef_smsp_dec['tp_sc_addr']['ton_npi']['type_of_number'] = 'international' if international else 'unknown'
|
||||
# ensure the parameter_indicators.tp_sc_addr is True
|
||||
ef_smsp_dec['parameter_indicators']['tp_sc_addr'] = True
|
||||
|
||||
# alpha_id padding: to make room for a human readable SMSC name that can be provisioned to the profile later
|
||||
# on, alpha_id needs to be empty but padded 0xff to some length.
|
||||
# - alpha_id is optional, setting alpha_id = '' ensures the IE is present.
|
||||
# - the length of the file is 28+Y where Y is the length of the alpha_id -- here the intended length of our padding
|
||||
# (see 3GPP TS 31.102 4.2.27 EF.SMSP). So if we want a maximum length of alpha_id = 14, we set the total
|
||||
# file size to 28+14 = 42.
|
||||
# - this file size has to go in two places: encode_record_bin() needs to know the length to encode the right
|
||||
# length of fillFileContent.
|
||||
# - the f_smsp needs to show the right file size in the PES, as in
|
||||
# 'ef-smsp': [('fileDescriptor', {'efFileSize': '2a', ...
|
||||
# (where 2a == 42)
|
||||
# - To generate the right amount of fillFileContent, pass total_len=42 to encode_record_bin().
|
||||
# - To show the right size in the PES, set f_smsp.rec_len = 42
|
||||
ef_smsp_dec['alpha_id'] = ''
|
||||
|
||||
# we can set this to choose a fixed length:
|
||||
#f_smsp.rec_len = 42
|
||||
# but leave rec_len unchanged to keep the same length as was found in the eSIM template.
|
||||
|
||||
# re-encode into the File body.
|
||||
f_smsp.body = ef_smsp.encode_record_bin(ef_smsp_dec, 1, total_len=f_smsp.rec_len)
|
||||
# re-encode into the File body
|
||||
f_smsp.body = ef_smsp.encode_record_bin(ef_smsp_dec, 1)
|
||||
#print("SMSP (new): %s" % f_smsp.body)
|
||||
# re-generate the pe.decoded member from the File instance
|
||||
pe.file2pe(f_smsp)
|
||||
|
||||
@classmethod
|
||||
def get_values_from_pes(cls, pes: ProfileElementSequence):
|
||||
for pe in pes.get_pes_for_type('usim'):
|
||||
f_smsp = pe.files.get('ef-smsp', None)
|
||||
if f_smsp is None:
|
||||
continue
|
||||
|
||||
try:
|
||||
ef_smsp = EF_SMSP()
|
||||
ef_smsp_dec = ef_smsp.decode_record_bin(f_smsp.body, 1)
|
||||
except IndexError:
|
||||
continue
|
||||
f_smsp = pe.files['ef-smsp']
|
||||
ef_smsp = EF_SMSP()
|
||||
ef_smsp_dec = ef_smsp.decode_record_bin(f_smsp.body, 1)
|
||||
|
||||
tp_sc_addr = ef_smsp_dec.get('tp_sc_addr', None)
|
||||
if not tp_sc_addr:
|
||||
continue
|
||||
|
||||
digits = tp_sc_addr.get('call_number', None)
|
||||
if not digits:
|
||||
continue
|
||||
|
||||
ton_npi = tp_sc_addr.get('ton_npi', None)
|
||||
if not ton_npi:
|
||||
continue
|
||||
international = ton_npi.get('type_of_number', None)
|
||||
if international is None:
|
||||
continue
|
||||
international = (international == 'international')
|
||||
|
||||
yield { cls.name: cls.tuple_to_str((international, digits)) }
|
||||
|
||||
|
||||
class MncLen(ConfigurableParameter):
|
||||
"""MNC length. Must be either 2 or 3. Sets only the MNC length field in EF-AD (Administrative Data)."""
|
||||
name = 'MNC-LEN'
|
||||
allow_chars = '23'
|
||||
strip_chars = ' \t\r\n'
|
||||
numeric_base = 10
|
||||
max_len = 1
|
||||
min_len = 1
|
||||
example_input = '2'
|
||||
default_source = param_source.ConstantSource
|
||||
|
||||
@classmethod
|
||||
def validate_val(cls, val):
|
||||
val = super().validate_val(val)
|
||||
val = int(val)
|
||||
if val not in (2, 3):
|
||||
raise ValueError(f"MNC-LEN must be either 2 or 3, not {val!r}")
|
||||
return val
|
||||
|
||||
@classmethod
|
||||
def apply_val(cls, pes: ProfileElementSequence, val):
|
||||
"""val must be an int: either 2 or 3"""
|
||||
for pe in pes.get_pes_for_type('usim'):
|
||||
if not hasattr(pe, 'files'):
|
||||
continue
|
||||
f_ad = pe.files.get('ef-ad')
|
||||
if not f_ad:
|
||||
continue
|
||||
# decode existing values
|
||||
if not f_ad.body:
|
||||
continue
|
||||
try:
|
||||
ef_ad = EF_AD()
|
||||
ef_ad_dec = ef_ad.decode_bin(f_ad.body)
|
||||
except StreamError:
|
||||
continue
|
||||
if 'mnc_len' not in ef_ad_dec:
|
||||
continue
|
||||
# change mnc_len
|
||||
ef_ad_dec['mnc_len'] = val
|
||||
# re-encode into the File body
|
||||
f_ad.body = ef_ad.encode_bin(ef_ad_dec)
|
||||
pe.file2pe(f_ad)
|
||||
|
||||
@classmethod
|
||||
def get_values_from_pes(cls, pes: ProfileElementSequence):
|
||||
for pe in pes.get_pes_for_type('usim'):
|
||||
if not hasattr(pe, 'files'):
|
||||
continue
|
||||
f_ad = pe.files.get('ef-ad', None)
|
||||
if f_ad is None:
|
||||
continue
|
||||
|
||||
try:
|
||||
ef_ad = EF_AD()
|
||||
ef_ad_dec = ef_ad.decode_bin(f_ad.body)
|
||||
except StreamError:
|
||||
continue
|
||||
|
||||
mnc_len = ef_ad_dec.get('mnc_len', None)
|
||||
if mnc_len is None:
|
||||
continue
|
||||
|
||||
yield { cls.name: str(mnc_len) }
|
||||
|
||||
|
||||
class SdKey(BinaryParam):
|
||||
"""Configurable Security Domain (SD) Key. Value is presented as bytes.
|
||||
Non-abstract implementations are generated in SdKey.generate_sd_key_classes"""
|
||||
class SdKey(BinaryParam, metaclass=ClassVarMeta):
|
||||
"""Configurable Security Domain (SD) Key. Value is presented as bytes."""
|
||||
# these will be set by subclasses
|
||||
key_type = None
|
||||
kvn = None
|
||||
reserved_kvn = tuple() # tuple of all reserved kvn for a given SCPxx
|
||||
key_id = None
|
||||
kvn = None
|
||||
key_usage_qual = None
|
||||
|
||||
@classmethod
|
||||
@@ -761,16 +628,11 @@ class SdKey(BinaryParam):
|
||||
key = SecurityDomainKey(
|
||||
key_version_number=cls.kvn,
|
||||
key_id=cls.key_id,
|
||||
key_usage_qualifier=cls.key_usage_qual,
|
||||
key_usage_qualifier=KeyUsageQualifier.build(cls.key_usage_qual),
|
||||
key_components=set_components,
|
||||
)
|
||||
pe.add_key(key)
|
||||
else:
|
||||
# A key of this KVN and ID already exists in the profile.
|
||||
|
||||
# Keep the key_usage_qualifier as it was in the profile, so skip this here:
|
||||
# key.key_usage_qualifier = cls.key_usage_qual
|
||||
|
||||
key.key_components = set_components
|
||||
|
||||
@classmethod
|
||||
@@ -787,162 +649,60 @@ class SdKey(BinaryParam):
|
||||
if kc:
|
||||
yield { cls.name: b2h(kc) }
|
||||
|
||||
class SdKeyScp80_01(SdKey, kvn=0x01, key_type=0x88, permitted_len=[16,24,32]): # AES key type
|
||||
pass
|
||||
class SdKeyScp80_01Kic(SdKeyScp80_01, key_id=0x01, key_usage_qual=0x18): # FIXME: ordering?
|
||||
pass
|
||||
class SdKeyScp80_01Kid(SdKeyScp80_01, key_id=0x02, key_usage_qual=0x14):
|
||||
pass
|
||||
class SdKeyScp80_01Kik(SdKeyScp80_01, key_id=0x03, key_usage_qual=0x48):
|
||||
pass
|
||||
|
||||
NO_OP = (('', {}))
|
||||
class SdKeyScp81_01(SdKey, kvn=0x81): # FIXME
|
||||
pass
|
||||
class SdKeyScp81_01Psk(SdKeyScp81_01, key_id=0x01, key_type=0x85, key_usage_qual=0x3C):
|
||||
pass
|
||||
class SdKeyScp81_01Dek(SdKeyScp81_01, key_id=0x02, key_type=0x88, key_usage_qual=0x48):
|
||||
pass
|
||||
|
||||
LEN_128 = (16,)
|
||||
LEN_128_192_256 = (16, 24, 32)
|
||||
LEN_128_256 = (16, 32)
|
||||
class SdKeyScp02_20(SdKey, kvn=0x20, key_type=0x88, permitted_len=[16,24,32]): # AES key type
|
||||
pass
|
||||
class SdKeyScp02_20Enc(SdKeyScp02_20, key_id=0x01, key_usage_qual=0x18):
|
||||
pass
|
||||
class SdKeyScp02_20Mac(SdKeyScp02_20, key_id=0x02, key_usage_qual=0x14):
|
||||
pass
|
||||
class SdKeyScp02_20Dek(SdKeyScp02_20, key_id=0x03, key_usage_qual=0x48):
|
||||
pass
|
||||
|
||||
DES = ('DES', dict(key_type=KeyType.des, allow_len=LEN_128) )
|
||||
AES = ('AES', dict(key_type=KeyType.aes, allow_len=LEN_128_192_256) )
|
||||
class SdKeyScp03_30(SdKey, kvn=0x30, key_type=0x88, permitted_len=[16,24,32]): # AES key type
|
||||
pass
|
||||
class SdKeyScp03_30Enc(SdKeyScp03_30, key_id=0x01, key_usage_qual=0x18):
|
||||
pass
|
||||
class SdKeyScp03_30Mac(SdKeyScp03_30, key_id=0x02, key_usage_qual=0x14):
|
||||
pass
|
||||
class SdKeyScp03_30Dek(SdKeyScp03_30, key_id=0x03, key_usage_qual=0x48):
|
||||
pass
|
||||
|
||||
ENC = ('ENC', dict(key_id=0x01, key_usage_qual=0x18) )
|
||||
MAC = ('MAC', dict(key_id=0x02, key_usage_qual=0x14) )
|
||||
DEK = ('DEK', dict(key_id=0x03, key_usage_qual=0x48) )
|
||||
class SdKeyScp03_31(SdKey, kvn=0x31, key_type=0x88, permitted_len=[16,24,32]): # AES key type
|
||||
pass
|
||||
class SdKeyScp03_31Enc(SdKeyScp03_31, key_id=0x01, key_usage_qual=0x18):
|
||||
pass
|
||||
class SdKeyScp03_31Mac(SdKeyScp03_31, key_id=0x02, key_usage_qual=0x14):
|
||||
pass
|
||||
class SdKeyScp03_31Dek(SdKeyScp03_31, key_id=0x03, key_usage_qual=0x48):
|
||||
pass
|
||||
|
||||
TLSPSK_PSK = ('TLSPSK', dict(key_type=KeyType.tls_psk, key_id=0x01, key_usage_qual=0x3c, allow_len=LEN_128_192_256) )
|
||||
TLSPSK_DEK = ('DEK', dict(key_id=0x02, key_usage_qual=0x48) )
|
||||
|
||||
# THIS IS THE LIST that controls which SdKeyXxx subclasses exist:
|
||||
SD_KEY_DEFS = (
|
||||
# name KVN x variants x variants
|
||||
('SCP02', (0x20, 0x21, 0x22, 0xff), (AES, ), (ENC, MAC, DEK) ),
|
||||
('SCP03', (0x30, 0x31, 0x32), (AES, ), (ENC, MAC, DEK) ),
|
||||
('SCP80', (0x01, 0x02, 0x03), (DES, AES), (ENC, MAC, DEK) ),
|
||||
|
||||
# key_id=1
|
||||
('SCP81', (0x40, 0x41, 0x42), (TLSPSK_PSK, ), ),
|
||||
# key_id=2
|
||||
('SCP81', (0x40, 0x41, 0x42), (DES, AES), (TLSPSK_DEK, ) ),
|
||||
)
|
||||
|
||||
all_implementations = None
|
||||
|
||||
@classmethod
|
||||
def generate_sd_key_classes(cls, sd_key_defs=SD_KEY_DEFS):
|
||||
'''This generates python classes to be exported in this module, as subclasses of class SdKey.
|
||||
|
||||
We create SdKey subclasses dynamically from a list.
|
||||
You can list all of them via:
|
||||
from pySim.esim.saip.personalization import SdKey
|
||||
SdKey.all_implementations
|
||||
or
|
||||
print('\n'.join(sorted(f'{x.__name__}\t{x.name}' for x in SdKey.all_implementations)))
|
||||
|
||||
at time of writing this comment, this prints:
|
||||
|
||||
SdKeyScp02Kvn20AesDek SCP02-KVN20-AES-DEK
|
||||
SdKeyScp02Kvn20AesEnc SCP02-KVN20-AES-ENC
|
||||
SdKeyScp02Kvn20AesMac SCP02-KVN20-AES-MAC
|
||||
SdKeyScp02Kvn21AesDek SCP02-KVN21-AES-DEK
|
||||
SdKeyScp02Kvn21AesEnc SCP02-KVN21-AES-ENC
|
||||
SdKeyScp02Kvn21AesMac SCP02-KVN21-AES-MAC
|
||||
SdKeyScp02Kvn22AesDek SCP02-KVN22-AES-DEK
|
||||
SdKeyScp02Kvn22AesEnc SCP02-KVN22-AES-ENC
|
||||
SdKeyScp02Kvn22AesMac SCP02-KVN22-AES-MAC
|
||||
SdKeyScp02KvnffAesDek SCP02-KVNff-AES-DEK
|
||||
SdKeyScp02KvnffAesEnc SCP02-KVNff-AES-ENC
|
||||
SdKeyScp02KvnffAesMac SCP02-KVNff-AES-MAC
|
||||
SdKeyScp03Kvn30AesDek SCP03-KVN30-AES-DEK
|
||||
SdKeyScp03Kvn30AesEnc SCP03-KVN30-AES-ENC
|
||||
SdKeyScp03Kvn30AesMac SCP03-KVN30-AES-MAC
|
||||
SdKeyScp03Kvn31AesDek SCP03-KVN31-AES-DEK
|
||||
SdKeyScp03Kvn31AesEnc SCP03-KVN31-AES-ENC
|
||||
SdKeyScp03Kvn31AesMac SCP03-KVN31-AES-MAC
|
||||
SdKeyScp03Kvn32AesDek SCP03-KVN32-AES-DEK
|
||||
SdKeyScp03Kvn32AesEnc SCP03-KVN32-AES-ENC
|
||||
SdKeyScp03Kvn32AesMac SCP03-KVN32-AES-MAC
|
||||
SdKeyScp80Kvn01AesDek SCP80-KVN01-AES-DEK
|
||||
SdKeyScp80Kvn01AesEnc SCP80-KVN01-AES-ENC
|
||||
SdKeyScp80Kvn01AesMac SCP80-KVN01-AES-MAC
|
||||
SdKeyScp80Kvn01DesDek SCP80-KVN01-DES-DEK
|
||||
SdKeyScp80Kvn01DesEnc SCP80-KVN01-DES-ENC
|
||||
SdKeyScp80Kvn01DesMac SCP80-KVN01-DES-MAC
|
||||
SdKeyScp80Kvn02AesDek SCP80-KVN02-AES-DEK
|
||||
SdKeyScp80Kvn02AesEnc SCP80-KVN02-AES-ENC
|
||||
SdKeyScp80Kvn02AesMac SCP80-KVN02-AES-MAC
|
||||
SdKeyScp80Kvn02DesDek SCP80-KVN02-DES-DEK
|
||||
SdKeyScp80Kvn02DesEnc SCP80-KVN02-DES-ENC
|
||||
SdKeyScp80Kvn02DesMac SCP80-KVN02-DES-MAC
|
||||
SdKeyScp80Kvn03AesDek SCP80-KVN03-AES-DEK
|
||||
SdKeyScp80Kvn03AesEnc SCP80-KVN03-AES-ENC
|
||||
SdKeyScp80Kvn03AesMac SCP80-KVN03-AES-MAC
|
||||
SdKeyScp80Kvn03DesDek SCP80-KVN03-DES-DEK
|
||||
SdKeyScp80Kvn03DesEnc SCP80-KVN03-DES-ENC
|
||||
SdKeyScp80Kvn03DesMac SCP80-KVN03-DES-MAC
|
||||
SdKeyScp81Kvn40AesDek SCP81-KVN40-AES-DEK
|
||||
SdKeyScp81Kvn40DesDek SCP81-KVN40-DES-DEK
|
||||
SdKeyScp81Kvn40Tlspsk SCP81-KVN40-TLSPSK
|
||||
SdKeyScp81Kvn41AesDek SCP81-KVN41-AES-DEK
|
||||
SdKeyScp81Kvn41DesDek SCP81-KVN41-DES-DEK
|
||||
SdKeyScp81Kvn41Tlspsk SCP81-KVN41-TLSPSK
|
||||
SdKeyScp81Kvn42AesDek SCP81-KVN42-AES-DEK
|
||||
SdKeyScp81Kvn42DesDek SCP81-KVN42-DES-DEK
|
||||
SdKeyScp81Kvn42Tlspsk SCP81-KVN42-TLSPSK
|
||||
'''
|
||||
|
||||
SdKey.all_implementations = []
|
||||
|
||||
def camel(s):
|
||||
return s[:1].upper() + s[1:].lower()
|
||||
|
||||
def do_variants(name, kvn, remaining_variants, labels=[], attrs={}):
|
||||
'recurse to unfold as many variants as there may be'
|
||||
if remaining_variants:
|
||||
# not a leaf node, collect more labels and attrs
|
||||
variants = remaining_variants[0]
|
||||
remaining_variants = remaining_variants[1:]
|
||||
|
||||
for label, valdict in variants:
|
||||
# pass copies to recursion
|
||||
inner_labels = list(labels)
|
||||
inner_attrs = dict(attrs)
|
||||
|
||||
inner_labels.append(label)
|
||||
inner_attrs.update(valdict)
|
||||
do_variants(name, kvn, remaining_variants,
|
||||
labels=inner_labels,
|
||||
attrs=inner_attrs)
|
||||
return
|
||||
|
||||
# leaf node. create a new class with all the accumulated vals
|
||||
parts = [name, f'KVN{kvn:02x}',] + labels
|
||||
cls_label = '-'.join(p for p in parts if p)
|
||||
|
||||
parts = ['Sd', 'Key', name, f'Kvn{kvn:02x}'] + labels
|
||||
clsname = ''.join(camel(p) for p in parts)
|
||||
|
||||
max_key_len = attrs.get('allow_len')[-1]
|
||||
|
||||
attrs.update({
|
||||
'name' : cls_label,
|
||||
'kvn': kvn,
|
||||
'example_input': f'00*{max_key_len}',
|
||||
})
|
||||
|
||||
# below line is like
|
||||
# class SdKeyScpNNKvnXXYyyZzz(SdKey):
|
||||
# <set attrs>
|
||||
cls_def = type(clsname, (cls,), attrs)
|
||||
|
||||
# for some unknown reason, subclassing from abc.ABC makes cls_def.__module__ == 'abc',
|
||||
# but we don't want 'abc.SdKeyScp03Kvn32AesEnc'.
|
||||
# Make sure it is 'pySim.esim.saip.personalization.SdKeyScp03Kvn32AesEnc'
|
||||
cls_def.__module__ = __name__
|
||||
|
||||
globals()[clsname] = cls_def
|
||||
SdKey.all_implementations.append(cls_def)
|
||||
class SdKeyScp03_32(SdKey, kvn=0x32, key_type=0x88, permitted_len=[16,24,32]): # AES key type
|
||||
pass
|
||||
class SdKeyScp03_32Enc(SdKeyScp03_32, key_id=0x01, key_usage_qual=0x18):
|
||||
pass
|
||||
class SdKeyScp03_32Mac(SdKeyScp03_32, key_id=0x02, key_usage_qual=0x14):
|
||||
pass
|
||||
class SdKeyScp03_32Dek(SdKeyScp03_32, key_id=0x03, key_usage_qual=0x48):
|
||||
pass
|
||||
|
||||
|
||||
for items in sd_key_defs:
|
||||
name, kvns = items[:2]
|
||||
variants = items[2:]
|
||||
for kvn in kvns:
|
||||
do_variants(name, kvn, variants)
|
||||
|
||||
# this creates all of the classes named like SdKeyScp02Kvn20AesDek to be published in this python module:
|
||||
SdKey.generate_sd_key_classes()
|
||||
|
||||
def obtain_all_pe_from_pelist(l: List[ProfileElement], wanted_type: str) -> ProfileElement:
|
||||
return (pe for pe in l if pe.type == wanted_type)
|
||||
@@ -1029,7 +789,7 @@ class Pin(DecimalHexParam):
|
||||
|
||||
for pinCode in pinCodes.decoded['pinCodes'][1]:
|
||||
if pinCode['keyReference'] == cls.keyReference:
|
||||
yield { cls.name: cls.decimal_hex_to_str(pinCode['pinValue']) }
|
||||
yield { cls.name: cls.decimal_hex_to_str(pinCode['pinValue']) }
|
||||
|
||||
@classmethod
|
||||
def get_values_from_pes(cls, pes: ProfileElementSequence):
|
||||
@@ -1107,22 +867,20 @@ class AlgoConfig(ConfigurableParameter):
|
||||
yield { cls.name: val }
|
||||
|
||||
class AlgorithmID(EnumParam, AlgoConfig):
|
||||
'''use validate_val() from EnumParam, and apply_val() from AlgoConfig.
|
||||
In get_values_from_pes(), return enum value names, not raw values.'''
|
||||
"""use validate_val() from EnumParam, and apply_val() from AlgoConfig.
|
||||
In get_values_from_pes(), return enum value names, not raw values."""
|
||||
name = "Algorithm"
|
||||
|
||||
# as in pySim/esim/asn1/saip/PE_Definitions-3.3.1.asn
|
||||
value_map = {
|
||||
"Milenage" : 1,
|
||||
"TUAK" : 2,
|
||||
"usim-test" : 3,
|
||||
}
|
||||
algo_config_key = 'algorithmID'
|
||||
example_input = "Milenage"
|
||||
default_source = param_source.ConstantSource
|
||||
|
||||
algo_config_key = 'algorithmID'
|
||||
# as in pySim/esim/asn1/saip/PE_Definitions-3.3.1.asn
|
||||
class Values(enum.IntEnum):
|
||||
Milenage = 1
|
||||
TUAK = 2
|
||||
usim_test = 3 # input 'usim-test' also accepted via fuzzy matching
|
||||
|
||||
# EnumParam.validate_val() returns the int values from value_map
|
||||
# EnumParam.validate_val() returns the int values from Values
|
||||
|
||||
@classmethod
|
||||
def get_values_from_pes(cls, pes: ProfileElementSequence):
|
||||
@@ -1197,184 +955,3 @@ class TuakNumberOfKeccak(IntegerParam, AlgoConfig):
|
||||
max_val = 255
|
||||
example_input = '1'
|
||||
default_source = param_source.ConstantSource
|
||||
|
||||
|
||||
class EfUstServiceParam(EnumParam):
|
||||
"""superclass for EF-UST service flag parameters"""
|
||||
service_idx = 0
|
||||
value_map = { 'enabled': True, 'disabled': False }
|
||||
default_source = param_source.ConstantSource
|
||||
example_input = sorted(value_map.keys())[0]
|
||||
|
||||
@classmethod
|
||||
def apply_val(cls, pes: ProfileElementSequence, val):
|
||||
for pe in pes.get_pes_for_type('usim'):
|
||||
f_ust = pe.files['ef-ust']
|
||||
ef_ust = EF_UST()
|
||||
ust = ef_ust.decode_bin(f_ust.body)
|
||||
|
||||
ust[cls.service_idx]['activated'] = val
|
||||
|
||||
f_ust.body = ef_ust.encode_bin(ust)
|
||||
pe.file2pe(f_ust)
|
||||
|
||||
@classmethod
|
||||
def get_values_from_pes(cls, pes: ProfileElementSequence):
|
||||
for pe in pes.get_pes_for_type('usim'):
|
||||
f_ust = pe.files.get('ef-ust', None)
|
||||
if not f_ust:
|
||||
continue
|
||||
ef_ust = EF_UST()
|
||||
try:
|
||||
ust = ef_ust.decode_bin(f_ust.body)
|
||||
|
||||
service_flag = ust[cls.service_idx]['activated']
|
||||
yield { cls.name: cls.map_val_to_name(service_flag) }
|
||||
except:
|
||||
pass
|
||||
|
||||
class SuciActive(EfUstServiceParam):
|
||||
"""EF-UST service nr 124: enable or disable the SUCI service."""
|
||||
service_idx = 124
|
||||
name = '5G-SUCI-active'
|
||||
value_map = { 'SUCI-on': True, 'SUCI-off': False }
|
||||
example_input = sorted(value_map.keys())[0]
|
||||
|
||||
class SuciInUsim(EfUstServiceParam):
|
||||
"""EF-UST service nr 125: calculate SUCI in UE or in USIM"""
|
||||
service_idx = 125
|
||||
name = '5G-SUCI-in-USIM'
|
||||
value_map = { 'SUCI-in-UE': False, 'SUCI-in-USIM': True }
|
||||
example_input = sorted(value_map.keys())[0]
|
||||
|
||||
class SuciRi(ConfigurableParameter):
|
||||
"""SUCI Routing Indicator as in section 4.4.11.11 of 3GPP TS 31.102"""
|
||||
name = '5G-SUCI-RI'
|
||||
allow_chars = '0123456789'
|
||||
min_len = 1
|
||||
max_len = 4
|
||||
allow_types = (str,)
|
||||
example_input = '0'
|
||||
default_source = param_source.ConstantSource
|
||||
|
||||
KEY_RI = "routing_indicator"
|
||||
|
||||
@classmethod
|
||||
def apply_val(cls, pes: ProfileElementSequence, val):
|
||||
for pe in pes.get_pes_for_type('df-5gs'):
|
||||
f_ri = pe.files.get('ef-routing-indicator', None)
|
||||
if f_ri is None:
|
||||
continue
|
||||
ef_ri = EF_Routing_Indicator()
|
||||
ri = ef_ri.decode_bin(f_ri.body)
|
||||
|
||||
ri[cls.KEY_RI] = str(val)
|
||||
|
||||
f_ri.body = ef_ri.encode_bin(ri)
|
||||
pe.file2pe(f_ri)
|
||||
|
||||
@classmethod
|
||||
def get_values_from_pes(cls, pes: ProfileElementSequence):
|
||||
for pe in pes.get_pes_for_type('df-5gs'):
|
||||
f_ri = pe.files.get('ef-routing-indicator', None)
|
||||
if f_ri is None:
|
||||
continue
|
||||
ef_ri = EF_Routing_Indicator()
|
||||
try:
|
||||
ri = ef_ri.decode_bin(f_ri.body)
|
||||
yield { cls.name: ri.get(cls.KEY_RI) }
|
||||
except:
|
||||
pass
|
||||
|
||||
class SuciCalcInfo(ConfigurableParameter):
|
||||
"""SUCI Calculation Information as in section 4.4.11.8 of 3GPP TS 31.102"""
|
||||
name = '5G-SUCI-CalcInfo'
|
||||
example_input = '{}'
|
||||
default_source = param_source.ConstantSource
|
||||
allow_types = (str,)
|
||||
max_len = 2000
|
||||
|
||||
@classmethod
|
||||
def validate_val(cls, val):
|
||||
val = super().validate_val(val)
|
||||
|
||||
if not val:
|
||||
raise ValueError("SUCI Calc Info value is empty -- should at least be an empty dict like '{}'")
|
||||
|
||||
# check that it is a dict something like
|
||||
# {
|
||||
# "prot_scheme_id_list": [
|
||||
# {"priority": 0, "identifier": 2, "key_index": 1},
|
||||
# {"priority": 1, "identifier": 1, "key_index": 2},
|
||||
# ],
|
||||
# "hnet_pubkey_list": [
|
||||
# {"hnet_pubkey_identifier": 27,
|
||||
# "hnet_pubkey": "0472DA71976234CE833A6907425867B82E074D44EF907DFB4B3E21C1C2256EBCD15A7DED52FCBB097A4ED250E036C7B9C8C7004C4EEDC4F068CD7BF8D3F900E3B4"},
|
||||
# {"hnet_pubkey_identifier": 30,
|
||||
# "hnet_pubkey": "5A8D38864820197C3394B92613B20B91633CBD897119273BF8E4A6F4EEC0A650"},
|
||||
# ],
|
||||
# }
|
||||
|
||||
try:
|
||||
d = json.loads(val)
|
||||
except json.decoder.JSONDecodeError as e:
|
||||
raise ValueError(f"Cannot parse SUCI Calc Info: {e}") from e
|
||||
|
||||
KEY_PSI_LIST = 'prot_scheme_id_list'
|
||||
KEY_HPK_LIST = 'hnet_pubkey_list'
|
||||
KEYS_D = set((KEY_HPK_LIST, KEY_PSI_LIST))
|
||||
KEYS_PSI = set(('identifier', 'key_index', 'priority'))
|
||||
KEYS_HPK = set(('hnet_pubkey_identifier', 'hnet_pubkey'))
|
||||
|
||||
if not (isinstance(d, dict)
|
||||
and set(d.keys()) == KEYS_D):
|
||||
raise ValueError(f"Unexpected structure in SUCI Calc Info: expected dict with entries {KEYS_D}")
|
||||
|
||||
psi = d.get(KEY_PSI_LIST, None)
|
||||
if not all((set(e.keys()) == KEYS_PSI) for e in psi):
|
||||
raise ValueError("Unexpected structure in SUCI Calc Info:"
|
||||
f" in {KEY_PSI_LIST}, expected dict with entries {KEYS_PSI}")
|
||||
|
||||
hpk = d.get(KEY_HPK_LIST, None)
|
||||
if not all((set(e.keys()) == KEYS_HPK) for e in hpk):
|
||||
raise ValueError("Unexpected structure in SUCI Calc Info:"
|
||||
f" in {KEY_HPK_LIST}, expected dict with entries {KEYS_HPK}")
|
||||
return d
|
||||
|
||||
@classmethod
|
||||
def _apply_suci(cls, pes: ProfileElementSequence, val, pe_type="df-5gs", pe_file="ef-suci-calc-info"):
|
||||
for pe in pes.get_pes_for_type(pe_type):
|
||||
f_sucici = pe.files.get(pe_file, None)
|
||||
if not f_sucici:
|
||||
continue
|
||||
ef_sucici = EF_SUCI_Calc_Info()
|
||||
f_sucici.body = ef_sucici.encode_bin(val)
|
||||
pe.file2pe(f_sucici)
|
||||
|
||||
@classmethod
|
||||
def apply_val(cls, pes: ProfileElementSequence, val):
|
||||
cls._apply_suci(pes, val, "df-5gs", "ef-suci-calc-info")
|
||||
cls._apply_suci(pes, val, "df-saip", "ef-suci-calc-info-usim")
|
||||
|
||||
@classmethod
|
||||
def _get_suci(cls, pes: ProfileElementSequence, pe_type="df-5gs", pe_file="ef-suci-calc-info"):
|
||||
for pe in pes.get_pes_for_type(pe_type):
|
||||
f_sucici = pe.files.get(pe_file, None)
|
||||
if not f_sucici:
|
||||
continue
|
||||
ef_sucici = EF_SUCI_Calc_Info()
|
||||
sucici = ef_sucici.decode_bin(f_sucici.body)
|
||||
|
||||
# normalize to string (bytes cannot go into json)
|
||||
for hnet_pubkey in sucici.get('hnet_pubkey_list', ()):
|
||||
val = hnet_pubkey['hnet_pubkey']
|
||||
if isinstance(val, bytes):
|
||||
val = b2h(val)
|
||||
hnet_pubkey['hnet_pubkey'] = val
|
||||
|
||||
yield { cls.name: json.dumps(sucici) }
|
||||
|
||||
@classmethod
|
||||
def get_values_from_pes(cls, pes: ProfileElementSequence):
|
||||
yield from cls._get_suci(pes, "df-5gs", "ef-suci-calc-info")
|
||||
yield from cls._get_suci(pes, "df-saip", "ef-suci-calc-info-usim")
|
||||
|
||||
@@ -152,7 +152,8 @@ class SimCard(SimCardBase):
|
||||
return sw
|
||||
|
||||
def update_smsp(self, smsp):
|
||||
data, sw = self._scc.update_record(EF['SMSP'], 1, rpad(smsp, 84))
|
||||
print("using update_smsp")
|
||||
data, sw = self._scc.update_record(EF['SMSP'], 1, smsp, leftpad=True)
|
||||
return sw
|
||||
|
||||
def update_ad(self, mnc=None, opmode=None, ofm=None, path=EF['AD']):
|
||||
|
||||
@@ -301,24 +301,54 @@ class LinkBaseTpdu(LinkBase):
|
||||
|
||||
prev_tpdu = tpdu
|
||||
data, sw = self.send_tpdu(tpdu)
|
||||
log.debug("T0: case #%u TPDU: %s => %s %s", case, tpdu, data or "(no data)", sw or "(no status word)")
|
||||
if sw is None:
|
||||
raise ValueError("no status word received")
|
||||
|
||||
# When we have sent the first APDU, the SW may indicate that there are response bytes
|
||||
# available. There are two SWs commonly used for this 9fxx (sim) and 61xx (usim), where
|
||||
# xx is the number of response bytes available.
|
||||
# See also:
|
||||
if sw is not None:
|
||||
while (sw[0:2] in ['9f', '61', '62', '63']):
|
||||
# SW1=9F: 3GPP TS 51.011 9.4.1, Responses to commands which are correctly executed
|
||||
# SW1=61: ISO/IEC 7816-4, Table 5 — General meaning of the interindustry values of SW1-SW2
|
||||
# SW1=62: ETSI TS 102 221 7.3.1.1.4 Clause 4b): 62xx, 63xx, 9xxx != 9000
|
||||
tpdu_gr = tpdu[0:2] + 'c00000' + sw[2:4]
|
||||
# After sending the APDU/TPDU the UICC/eUICC or SIM may response with a status word that indicates that further
|
||||
# TPDUs have to be sent in order to complete the task.
|
||||
if case == 4 or self.apdu_strict == False:
|
||||
# In case the APDU is a case #4 APDU, the UICC/eUICC/SIM may indicate that there is response data
|
||||
# available which has to be retrieved using a GET RESPONSE command TPDU.
|
||||
#
|
||||
# ETSI TS 102 221, section 7.3.1.1.4 is very cleare about the fact that the GET RESPONSE mechanism
|
||||
# shall only apply on case #4 APDUs but unfortunately it is impossible to distinguish between case #3
|
||||
# and case #4 when the APDU format is not strictly followed. In order to be able to detect case #4
|
||||
# correctly the Le byte (usually 0x00) must be present, is often forgotten. To avoid problems with
|
||||
# legacy scripts that use raw APDU strings, we will still loosely apply GET RESPONSE based on what
|
||||
# the status word indicates. Unless the user explicitly enables the strict mode (set apdu_strict true)
|
||||
while True:
|
||||
if sw in ['9000', '9100']:
|
||||
# A status word of 9000 (or 9100 in case there is pending data from a proactive SIM command)
|
||||
# indicates that either no response data was returnd or all response data has been retrieved
|
||||
# successfully. We may discontinue the processing at this point.
|
||||
break;
|
||||
if sw[0:2] in ['61', '9f']:
|
||||
# A status word of 61xx or 9fxx indicates that there is (still) response data available. We
|
||||
# send a GET RESPONSE command with the length value indicated in the second byte of the status
|
||||
# word. (see also ETSI TS 102 221, section 7.3.1.1.4, clause 4a and 3GPP TS 51.011 9.4.1 and
|
||||
# ISO/IEC 7816-4, Table 5)
|
||||
le_gr = sw[2:4]
|
||||
elif sw[0:2] in ['62', '63']:
|
||||
# There are corner cases (status word is 62xx or 63xx) where the UICC/eUICC/SIM asks us
|
||||
# to send a dummy GET RESPONSE command. We send a GET RESPONSE command with a length of 0.
|
||||
# (see also ETSI TS 102 221, section 7.3.1.1.4, clause 4b and ETSI TS 151 011, section 9.4.1)
|
||||
le_gr = '00'
|
||||
else:
|
||||
# A status word other then the ones covered by the above logic may indicate an error. In this
|
||||
# case we will discontinue the processing as well.
|
||||
# (see also ETSI TS 102 221, section 7.3.1.1.4, clause 4c)
|
||||
break
|
||||
tpdu_gr = tpdu[0:2] + 'c00000' + le_gr
|
||||
prev_tpdu = tpdu_gr
|
||||
d, sw = self.send_tpdu(tpdu_gr)
|
||||
data += d
|
||||
if sw[0:2] == '6c':
|
||||
# SW1=6C: ETSI TS 102 221 Table 7.1: Procedure byte coding
|
||||
tpdu_gr = prev_tpdu[0:8] + sw[2:4]
|
||||
data, sw = self.send_tpdu(tpdu_gr)
|
||||
data_gr, sw = self.send_tpdu(tpdu_gr)
|
||||
log.debug("T0: GET RESPONSE TPDU: %s => %s %s", tpdu_gr, data_gr or "(no data)", sw or "(no status word)")
|
||||
data += data_gr
|
||||
if sw[0:2] == '6c':
|
||||
# SW1=6C: ETSI TS 102 221 Table 7.1: Procedure byte coding
|
||||
tpdu_gr = prev_tpdu[0:8] + sw[2:4]
|
||||
data, sw = self.send_tpdu(tpdu_gr)
|
||||
log.debug("T0: repated case #%u TPDU: %s => %s %s", case, tpdu_gr, data or "(no data)", sw or "(no status word)")
|
||||
|
||||
return data, sw
|
||||
|
||||
|
||||
@@ -327,7 +327,7 @@ class EF_SUCI_Calc_Info(TransparentEF):
|
||||
"""conversion method to generate list of {hnet_pubkey_identifier, hnet_pubkey} dicts
|
||||
from flat [{hnet_pubkey_identifier: }, {net_pubkey: }, ...] list"""
|
||||
out = []
|
||||
while l:
|
||||
while len(l):
|
||||
a = l.pop(0)
|
||||
b = l.pop(0)
|
||||
z = {**a, **b}
|
||||
|
||||
@@ -251,6 +251,16 @@ class EF_SMSP(LinFixedEF):
|
||||
"numbering_plan_id": "isdn_e164" },
|
||||
"call_number": "4915790109999" },
|
||||
"tp_pid": b"\x00", "tp_dcs": b"\x00", "tp_vp_minutes": 4320 } ),
|
||||
( 'e1ffffffffffffffffffffffff0891945197109099f9ffffff0000a9',
|
||||
{ "alpha_id": "", "parameter_indicators": { "tp_dest_addr": False, "tp_sc_addr": True,
|
||||
"tp_pid": True, "tp_dcs": True, "tp_vp": True },
|
||||
"tp_dest_addr": { "length": 255, "ton_npi": { "ext": True, "type_of_number": "reserved_for_extension",
|
||||
"numbering_plan_id": "reserved_for_extension" },
|
||||
"call_number": "" },
|
||||
"tp_sc_addr": { "length": 8, "ton_npi": { "ext": True, "type_of_number": "international",
|
||||
"numbering_plan_id": "isdn_e164" },
|
||||
"call_number": "4915790109999" },
|
||||
"tp_pid": b"\x00", "tp_dcs": b"\x00", "tp_vp_minutes": 4320 } ),
|
||||
( '454e6574776f726b73fffffffffffffff1ffffffffffffffffffffffffffffffffffffffffffffffff0000a7',
|
||||
{ "alpha_id": "ENetworks", "parameter_indicators": { "tp_dest_addr": False, "tp_sc_addr": True,
|
||||
"tp_pid": True, "tp_dcs": True, "tp_vp": False },
|
||||
@@ -331,7 +341,8 @@ class EF_SMSP(LinFixedEF):
|
||||
'ton_npi'/TonNpi, 'call_number'/PaddedBcdAdapter(Rpad(Bytes(10))))
|
||||
DestAddr = Struct('length'/Rebuild(Int8ub, lambda ctx: EF_SMSP.dest_addr_len(ctx)),
|
||||
'ton_npi'/TonNpi, 'call_number'/PaddedBcdAdapter(Rpad(Bytes(10))))
|
||||
self._construct = Struct('alpha_id'/COptional(GsmOrUcs2Adapter(Rpad(Bytes(this._.total_len-28)))),
|
||||
# (see comment below)
|
||||
self._construct = Struct('alpha_id'/GsmOrUcs2Adapter(Rpad(Bytes(this._.total_len-28))),
|
||||
'parameter_indicators'/InvertAdapter(BitStruct(
|
||||
Const(7, BitsInteger(3)),
|
||||
'tp_vp'/Flag,
|
||||
@@ -345,6 +356,25 @@ class EF_SMSP(LinFixedEF):
|
||||
'tp_dcs'/Bytes(1),
|
||||
'tp_vp_minutes'/EF_SMSP.ValidityPeriodAdapter(Byte))
|
||||
|
||||
# Ensure 'alpha_id' is always present
|
||||
def encode_record_hex(self, abstract_data: dict, record_nr: int, total_len: int = None) -> str:
|
||||
# Problem: TS 51.011 Section 10.5.6 describes the 'alpha_id' field as optional. However, this is only true
|
||||
# at the time when the record length of the file is set up in the file system. A card manufacturer may decide
|
||||
# to remove the field by setting the record length to 28. Likewise, the card manaufacturer may also decide to
|
||||
# set the field to a distinct length by setting the record length to a value greater than 28 (e.g. 14 bytes
|
||||
# 'alpha_id' + 28 bytes). Due to the fixed nature of the record length, this eventually means that in practice
|
||||
# 'alpha_id' is a mandatory field with a fixed length.
|
||||
#
|
||||
# Due to the problematic specification of 'alpha_id' as a pseudo-optional field at the beginning of a
|
||||
# fixed-size memory, the construct definition in self._construct has been incorrectly implemented and the field
|
||||
# has been marked as COptional. We may correct the problem by removing COptional. But to maintain compatibility,
|
||||
# we then have to ensure that in case the field is not provided (None), it is set to an empty string ('').
|
||||
#
|
||||
# See also ts_31_102.py, class EF_OCI for a correct example.
|
||||
if abstract_data['alpha_id'] is None:
|
||||
abstract_data['alpha_id'] = ''
|
||||
return super().encode_record_hex(abstract_data, record_nr, total_len)
|
||||
|
||||
# TS 51.011 Section 10.5.7
|
||||
class EF_SMSS(TransparentEF):
|
||||
class MemCapAdapter(Adapter):
|
||||
|
||||
@@ -5,7 +5,7 @@ ICCID: 8988219000000117833
|
||||
IMSI: 001010000000111
|
||||
GID1: ffffffffffffffff
|
||||
GID2: ffffffffffffffff
|
||||
SMSP: e1ffffffffffffffffffffffff0581005155f5ffffffffffff000000ffffffffffffffffffffffffffff
|
||||
SMSP: ffffffffffffffffffffffffffffe1ffffffffffffffffffffffff0581005155f5ffffffffffff000000
|
||||
SMSC: 0015555
|
||||
SPN: Fairwaves
|
||||
Show in HPLMN: False
|
||||
|
||||
@@ -5,7 +5,7 @@ ICCID: 89445310150011013678
|
||||
IMSI: 001010000000102
|
||||
GID1: Can't read file -- SW match failed! Expected 9000 and got 6a82.
|
||||
GID2: Can't read file -- SW match failed! Expected 9000 and got 6a82.
|
||||
SMSP: e1ffffffffffffffffffffffff0581005155f5ffffffffffff000000ffffffffffffffffffffffffffff
|
||||
SMSP: ffffffffffffffffffffffffffffe1ffffffffffffffffffffffff0581005155f5ffffffffffff000000
|
||||
SMSC: 0015555
|
||||
SPN: wavemobile
|
||||
Show in HPLMN: False
|
||||
|
||||
@@ -7,10 +7,24 @@ set apdu_strict true
|
||||
# No command data field, No response data field present
|
||||
apdu 00700001 --expect-sw 9000 --expect-response-regex '^$'
|
||||
|
||||
# Case #1: (verify pin)
|
||||
# This command returns the number of remaining authentication attempts in the
|
||||
# form of a status that has the form 63cX, where X is the number of remaining
|
||||
# attempts. Such a status word can be easily confused with the response to a
|
||||
# case #4 APDU. This test checks if the transport layer correctly distinguishes
|
||||
# the between APDU case #1 and APDU case #4.
|
||||
apdu 0020000A --expect-sw 63c? --expect-response-regex '^$'
|
||||
|
||||
# Case #2: (status)
|
||||
# No command data field, Response data field present
|
||||
apdu 80F2000000 --expect-sw 9000 --expect-response-regex '^[a-fA-F0-9]+$'
|
||||
|
||||
# Case #2: (verify pin)
|
||||
# (see also above). This test checks if the transport layer is also able to
|
||||
# distinguish correctly between APDU case #2 (with zero length response) and
|
||||
# APDU case #4.
|
||||
apdu 0020000A00 --expect-sw 63c? --expect-response-regex '^$'
|
||||
|
||||
# Case #3: (terminal capability)
|
||||
# Command data field present, No response data field
|
||||
apdu 80AA000005a903830180 --expect-sw 9000 --expect-response-regex '^$'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Utility to verify the functionality of pySim-trace.py
|
||||
# Utility to verify the functionality of pySim-smpp2sim.py
|
||||
#
|
||||
# (C) 2026 by sysmocom - s.f.m.c. GmbH
|
||||
# All Rights Reserved
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
../../smdpp-data
|
||||
@@ -1,522 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# (C) 2025 by sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
|
||||
#
|
||||
# Author: Neels Hofmeyr
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import io
|
||||
import sys
|
||||
import unittest
|
||||
import io
|
||||
import json
|
||||
from importlib import resources
|
||||
from osmocom.utils import hexstr
|
||||
from pySim.esim.saip import ProfileElementSequence
|
||||
import pySim.esim.saip.personalization as p13n
|
||||
import smdpp_data.upp
|
||||
|
||||
import xo
|
||||
update_expected_output = False
|
||||
|
||||
def valstr(val):
|
||||
if isinstance(val, io.BytesIO):
|
||||
val = val.getvalue()
|
||||
if isinstance(val, bytearray):
|
||||
val = bytes(val)
|
||||
return f'{val!r}'
|
||||
|
||||
def valtypestr(val):
|
||||
if isinstance(val, dict):
|
||||
types = []
|
||||
for v in val.values():
|
||||
types.append(f'{type(v).__name__}')
|
||||
|
||||
val_type = '{' + ', '.join(types) + '}'
|
||||
else:
|
||||
val_type = f'{type(val).__name__}'
|
||||
return f'{valstr(val)}:{val_type}'
|
||||
|
||||
class ConfigurableParameterTest(unittest.TestCase):
|
||||
|
||||
def test_parameters(self):
|
||||
|
||||
upp_fnames = (
|
||||
'TS48v5_SAIP2.1A_NoBERTLV.der',
|
||||
'TS48v5_SAIP2.3_BERTLV_SUCI.der',
|
||||
'TS48v5_SAIP2.1B_NoBERTLV.der',
|
||||
'TS48v5_SAIP2.3_NoBERTLV.der',
|
||||
)
|
||||
|
||||
class Paramtest:
|
||||
iff_present_default = False
|
||||
def __init__(self, param_cls, val, expect_val, expect_clean_val=None, iff_present=None):
|
||||
self.param_cls = param_cls
|
||||
self.val = val
|
||||
self.expect_clean_val = expect_clean_val
|
||||
self.expect_val = expect_val
|
||||
if iff_present is None:
|
||||
iff_present = Paramtest.iff_present_default
|
||||
self.iff_present = iff_present
|
||||
|
||||
param_tests = [
|
||||
Paramtest(param_cls=p13n.Imsi, val='123456',
|
||||
expect_clean_val=str('123456'),
|
||||
expect_val={'IMSI': hexstr('123456'),
|
||||
'IMSI-ACC': '0040'}),
|
||||
Paramtest(param_cls=p13n.Imsi, val=int(123456),
|
||||
expect_val={'IMSI': hexstr('123456'),
|
||||
'IMSI-ACC': '0040'}),
|
||||
|
||||
Paramtest(param_cls=p13n.Imsi, val='123456789012345',
|
||||
expect_clean_val=str('123456789012345'),
|
||||
expect_val={'IMSI': hexstr('123456789012345'),
|
||||
'IMSI-ACC': '0020'}),
|
||||
Paramtest(param_cls=p13n.Imsi, val=int(123456789012345),
|
||||
expect_val={'IMSI': hexstr('123456789012345'),
|
||||
'IMSI-ACC': '0020'}),
|
||||
|
||||
Paramtest(param_cls=p13n.Puk1,
|
||||
val='12345678',
|
||||
expect_clean_val=b'12345678',
|
||||
expect_val='12345678'),
|
||||
Paramtest(param_cls=p13n.Puk1,
|
||||
val=int(12345678),
|
||||
expect_clean_val=b'12345678',
|
||||
expect_val='12345678'),
|
||||
|
||||
Paramtest(param_cls=p13n.Puk2,
|
||||
val='12345678',
|
||||
expect_clean_val=b'12345678',
|
||||
expect_val='12345678'),
|
||||
|
||||
Paramtest(param_cls=p13n.Pin1,
|
||||
val='1234',
|
||||
expect_clean_val=b'1234\xff\xff\xff\xff',
|
||||
expect_val='1234'),
|
||||
Paramtest(param_cls=p13n.Pin1,
|
||||
val='123456',
|
||||
expect_clean_val=b'123456\xff\xff',
|
||||
expect_val='123456'),
|
||||
Paramtest(param_cls=p13n.Pin1,
|
||||
val='12345678',
|
||||
expect_clean_val=b'12345678',
|
||||
expect_val='12345678'),
|
||||
Paramtest(param_cls=p13n.Pin1,
|
||||
val=int(1234),
|
||||
expect_clean_val=b'1234\xff\xff\xff\xff',
|
||||
expect_val='1234'),
|
||||
Paramtest(param_cls=p13n.Pin1,
|
||||
val=int(123456),
|
||||
expect_clean_val=b'123456\xff\xff',
|
||||
expect_val='123456'),
|
||||
Paramtest(param_cls=p13n.Pin1,
|
||||
val=int(12345678),
|
||||
expect_clean_val=b'12345678',
|
||||
expect_val='12345678'),
|
||||
|
||||
Paramtest(param_cls=p13n.Adm1,
|
||||
val='1234',
|
||||
expect_clean_val=b'1234\xff\xff\xff\xff',
|
||||
expect_val='1234'),
|
||||
Paramtest(param_cls=p13n.Adm1,
|
||||
val='123456',
|
||||
expect_clean_val=b'123456\xff\xff',
|
||||
expect_val='123456'),
|
||||
Paramtest(param_cls=p13n.Adm1,
|
||||
val='12345678',
|
||||
expect_clean_val=b'12345678',
|
||||
expect_val='12345678'),
|
||||
Paramtest(param_cls=p13n.Adm1,
|
||||
val=int(123456),
|
||||
expect_clean_val=b'123456\xff\xff',
|
||||
expect_val='123456'),
|
||||
|
||||
Paramtest(param_cls=p13n.AlgorithmID,
|
||||
val='Milenage',
|
||||
expect_clean_val=1,
|
||||
expect_val='Milenage'),
|
||||
Paramtest(param_cls=p13n.AlgorithmID,
|
||||
val='TUAK',
|
||||
expect_clean_val=2,
|
||||
expect_val='TUAK'),
|
||||
Paramtest(param_cls=p13n.AlgorithmID,
|
||||
val='usim-test',
|
||||
expect_clean_val=3,
|
||||
expect_val='usim-test'),
|
||||
|
||||
Paramtest(param_cls=p13n.AlgorithmID,
|
||||
val=1,
|
||||
expect_clean_val=1,
|
||||
expect_val='Milenage'),
|
||||
Paramtest(param_cls=p13n.AlgorithmID,
|
||||
val=2,
|
||||
expect_clean_val=2,
|
||||
expect_val='TUAK'),
|
||||
Paramtest(param_cls=p13n.AlgorithmID,
|
||||
val=3,
|
||||
expect_clean_val=3,
|
||||
expect_val='usim-test'),
|
||||
|
||||
Paramtest(param_cls=p13n.K,
|
||||
val='01020304050607080910111213141516',
|
||||
expect_clean_val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_val='01020304050607080910111213141516'),
|
||||
Paramtest(param_cls=p13n.K,
|
||||
val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_clean_val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_val='01020304050607080910111213141516'),
|
||||
Paramtest(param_cls=p13n.K,
|
||||
val=bytearray(b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16'),
|
||||
expect_clean_val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_val='01020304050607080910111213141516'),
|
||||
Paramtest(param_cls=p13n.K,
|
||||
val=io.BytesIO(b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16'),
|
||||
expect_clean_val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_val='01020304050607080910111213141516'),
|
||||
Paramtest(param_cls=p13n.K,
|
||||
val=int(11020304050607080910111213141516),
|
||||
expect_clean_val=b'\x11\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_val='11020304050607080910111213141516'),
|
||||
|
||||
Paramtest(param_cls=p13n.Opc,
|
||||
val='01020304050607080910111213141516',
|
||||
expect_clean_val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_val='01020304050607080910111213141516'),
|
||||
Paramtest(param_cls=p13n.Opc,
|
||||
val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_clean_val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_val='01020304050607080910111213141516'),
|
||||
Paramtest(param_cls=p13n.Opc,
|
||||
val=bytearray(b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16'),
|
||||
expect_clean_val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_val='01020304050607080910111213141516'),
|
||||
Paramtest(param_cls=p13n.Opc,
|
||||
val=io.BytesIO(b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16'),
|
||||
expect_clean_val=b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16',
|
||||
expect_val='01020304050607080910111213141516'),
|
||||
|
||||
Paramtest(param_cls=p13n.SmspTpScAddr,
|
||||
val='+1234567',
|
||||
expect_clean_val=(True, '1234567'),
|
||||
expect_val='+1234567'),
|
||||
Paramtest(param_cls=p13n.SmspTpScAddr,
|
||||
val=1234567,
|
||||
expect_clean_val=(False, '1234567'),
|
||||
expect_val='1234567'),
|
||||
|
||||
Paramtest(param_cls=p13n.TuakNumberOfKeccak,
|
||||
val='123',
|
||||
expect_clean_val=123,
|
||||
expect_val='123'),
|
||||
Paramtest(param_cls=p13n.TuakNumberOfKeccak,
|
||||
val=123,
|
||||
expect_clean_val=123,
|
||||
expect_val='123'),
|
||||
|
||||
Paramtest(param_cls=p13n.MilenageRotationConstants,
|
||||
val='0a 0b 0c 01 02',
|
||||
expect_clean_val=b'\x0a\x0b\x0c\x01\x02',
|
||||
expect_val='0a0b0c0102'),
|
||||
Paramtest(param_cls=p13n.MilenageRotationConstants,
|
||||
val=b'\x0a\x0b\x0c\x01\x02',
|
||||
expect_clean_val=b'\x0a\x0b\x0c\x01\x02',
|
||||
expect_val='0a0b0c0102'),
|
||||
Paramtest(param_cls=p13n.MilenageRotationConstants,
|
||||
val=bytearray(b'\x0a\x0b\x0c\x01\x02'),
|
||||
expect_clean_val=b'\x0a\x0b\x0c\x01\x02',
|
||||
expect_val='0a0b0c0102'),
|
||||
|
||||
Paramtest(param_cls=p13n.MilenageXoringConstants,
|
||||
val='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
|
||||
' bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
|
||||
' cccccccccccccccccccccccccccccccc'
|
||||
' 11111111111111111111111111111111'
|
||||
' 22222222222222222222222222222222',
|
||||
expect_clean_val=b'\xaa' * 16
|
||||
+ b'\xbb' * 16
|
||||
+ b'\xcc' * 16
|
||||
+ b'\x11' * 16
|
||||
+ b'\x22' * 16,
|
||||
expect_val='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
|
||||
'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
|
||||
'cccccccccccccccccccccccccccccccc'
|
||||
'11111111111111111111111111111111'
|
||||
'22222222222222222222222222222222'),
|
||||
Paramtest(param_cls=p13n.MilenageXoringConstants,
|
||||
val=b'\xaa' * 16
|
||||
+ b'\xbb' * 16
|
||||
+ b'\xcc' * 16
|
||||
+ b'\x11' * 16
|
||||
+ b'\x22' * 16,
|
||||
expect_clean_val=b'\xaa' * 16
|
||||
+ b'\xbb' * 16
|
||||
+ b'\xcc' * 16
|
||||
+ b'\x11' * 16
|
||||
+ b'\x22' * 16,
|
||||
expect_val='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
|
||||
'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
|
||||
'cccccccccccccccccccccccccccccccc'
|
||||
'11111111111111111111111111111111'
|
||||
'22222222222222222222222222222222'),
|
||||
|
||||
|
||||
Paramtest(param_cls=p13n.MncLen,
|
||||
val='2',
|
||||
expect_clean_val=2,
|
||||
expect_val='2'),
|
||||
Paramtest(param_cls=p13n.MncLen,
|
||||
val=3,
|
||||
expect_clean_val=3,
|
||||
expect_val='3'),
|
||||
]
|
||||
|
||||
Paramtest.iff_present_default = True
|
||||
|
||||
sucici = {
|
||||
"prot_scheme_id_list": [
|
||||
{"priority": 0, "identifier": 2, "key_index": 1},
|
||||
{"priority": 1, "identifier": 1, "key_index": 2},
|
||||
],
|
||||
"hnet_pubkey_list": [
|
||||
{"hnet_pubkey_identifier": 27,
|
||||
"hnet_pubkey": "0472da71976234ce833a6907425867b82e074d44ef907dfb4b3e21c1c2256ebcd15a7ded52fcbb097a4ed250e036c7b9c8c7004c4eedc4f068cd7bf8d3f900e3b4"},
|
||||
{"hnet_pubkey_identifier": 30,
|
||||
"hnet_pubkey": "5a8d38864820197c3394b92613b20b91633cbd897119273bf8e4a6f4eec0a650"},
|
||||
],
|
||||
}
|
||||
|
||||
param_tests.extend([
|
||||
Paramtest(param_cls=p13n.SuciActive, val='SUCI-on',
|
||||
expect_clean_val=True,
|
||||
expect_val={'5G-SUCI-active': 'SUCI-on'}),
|
||||
Paramtest(param_cls=p13n.SuciActive, val='SUCI-off',
|
||||
expect_clean_val=False,
|
||||
expect_val={'5G-SUCI-active': 'SUCI-off'}),
|
||||
|
||||
Paramtest(param_cls=p13n.SuciInUsim, val='SUCI-in-UE',
|
||||
expect_clean_val=False,
|
||||
expect_val={'5G-SUCI-in-USIM': 'SUCI-in-UE'}),
|
||||
Paramtest(param_cls=p13n.SuciInUsim, val='SUCI-in-USIM',
|
||||
expect_clean_val=True,
|
||||
expect_val={'5G-SUCI-in-USIM': 'SUCI-in-USIM'}),
|
||||
|
||||
Paramtest(param_cls=p13n.SuciRi, val='123',
|
||||
expect_clean_val='123',
|
||||
expect_val={'5G-SUCI-RI': '123'}),
|
||||
Paramtest(param_cls=p13n.SuciRi, val='0',
|
||||
expect_clean_val='0',
|
||||
expect_val={'5G-SUCI-RI': '0'}),
|
||||
Paramtest(param_cls=p13n.SuciRi, val='9999',
|
||||
expect_clean_val='9999',
|
||||
expect_val={'5G-SUCI-RI': '9999'}),
|
||||
|
||||
Paramtest(param_cls=p13n.SuciCalcInfo,
|
||||
val=json.dumps(sucici),
|
||||
expect_clean_val=sucici,
|
||||
expect_val={'5G-SUCI-CalcInfo': json.dumps(sucici)}),
|
||||
|
||||
])
|
||||
|
||||
Paramtest.iff_present_default = False
|
||||
|
||||
for sdkey_cls in (
|
||||
# thin out the number of tests, as a compromise between completeness and test runtime
|
||||
p13n.SdKeyScp02Kvn20AesDek,
|
||||
#p13n.SdKeyScp02Kvn20AesEnc,
|
||||
#p13n.SdKeyScp02Kvn20AesMac,
|
||||
#p13n.SdKeyScp02Kvn21AesDek,
|
||||
p13n.SdKeyScp02Kvn21AesEnc,
|
||||
#p13n.SdKeyScp02Kvn21AesMac,
|
||||
#p13n.SdKeyScp02Kvn22AesDek,
|
||||
#p13n.SdKeyScp02Kvn22AesEnc,
|
||||
p13n.SdKeyScp02Kvn22AesMac,
|
||||
#p13n.SdKeyScp02KvnffAesDek,
|
||||
#p13n.SdKeyScp02KvnffAesEnc,
|
||||
#p13n.SdKeyScp02KvnffAesMac,
|
||||
p13n.SdKeyScp03Kvn30AesDek,
|
||||
#p13n.SdKeyScp03Kvn30AesEnc,
|
||||
#p13n.SdKeyScp03Kvn30AesMac,
|
||||
#p13n.SdKeyScp03Kvn31AesDek,
|
||||
p13n.SdKeyScp03Kvn31AesEnc,
|
||||
#p13n.SdKeyScp03Kvn31AesMac,
|
||||
#p13n.SdKeyScp03Kvn32AesDek,
|
||||
#p13n.SdKeyScp03Kvn32AesEnc,
|
||||
p13n.SdKeyScp03Kvn32AesMac,
|
||||
#p13n.SdKeyScp80Kvn01AesDek,
|
||||
#p13n.SdKeyScp80Kvn01AesEnc,
|
||||
#p13n.SdKeyScp80Kvn01AesMac,
|
||||
p13n.SdKeyScp80Kvn01DesDek,
|
||||
#p13n.SdKeyScp80Kvn01DesEnc,
|
||||
#p13n.SdKeyScp80Kvn01DesMac,
|
||||
#p13n.SdKeyScp80Kvn02AesDek,
|
||||
p13n.SdKeyScp80Kvn02AesEnc,
|
||||
#p13n.SdKeyScp80Kvn02AesMac,
|
||||
#p13n.SdKeyScp80Kvn02DesDek,
|
||||
#p13n.SdKeyScp80Kvn02DesEnc,
|
||||
p13n.SdKeyScp80Kvn02DesMac,
|
||||
#p13n.SdKeyScp80Kvn03AesDek,
|
||||
#p13n.SdKeyScp80Kvn03AesEnc,
|
||||
#p13n.SdKeyScp80Kvn03AesMac,
|
||||
p13n.SdKeyScp80Kvn03DesDek,
|
||||
#p13n.SdKeyScp80Kvn03DesEnc,
|
||||
#p13n.SdKeyScp80Kvn03DesMac,
|
||||
#p13n.SdKeyScp81Kvn40AesDek,
|
||||
p13n.SdKeyScp81Kvn40DesDek,
|
||||
#p13n.SdKeyScp81Kvn40Tlspsk,
|
||||
#p13n.SdKeyScp81Kvn41AesDek,
|
||||
#p13n.SdKeyScp81Kvn41DesDek,
|
||||
p13n.SdKeyScp81Kvn41Tlspsk,
|
||||
#p13n.SdKeyScp81Kvn42AesDek,
|
||||
#p13n.SdKeyScp81Kvn42DesDek,
|
||||
#p13n.SdKeyScp81Kvn42Tlspsk,
|
||||
):
|
||||
|
||||
for key_len in sdkey_cls.allow_len:
|
||||
val = '0102030405060708091011121314151617181920212223242526272829303132'
|
||||
expect_clean_val = (b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16'
|
||||
b'\x17\x18\x19\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x30\x31\x32')
|
||||
expect_val = '0102030405060708091011121314151617181920212223242526272829303132'
|
||||
|
||||
val = val[:key_len*2]
|
||||
expect_clean_val = expect_clean_val[:key_len]
|
||||
expect_val = val
|
||||
|
||||
param_tests.append(Paramtest(param_cls=sdkey_cls, val=val, expect_clean_val=expect_clean_val, expect_val=expect_val))
|
||||
|
||||
# test bytes input
|
||||
val = expect_clean_val
|
||||
param_tests.append(Paramtest(param_cls=sdkey_cls, val=val, expect_clean_val=expect_clean_val, expect_val=expect_val))
|
||||
|
||||
# test bytearray input
|
||||
val = bytearray(expect_clean_val)
|
||||
param_tests.append(Paramtest(param_cls=sdkey_cls, val=val, expect_clean_val=expect_clean_val, expect_val=expect_val))
|
||||
|
||||
# test BytesIO input
|
||||
val = io.BytesIO(expect_clean_val)
|
||||
param_tests.append(Paramtest(param_cls=sdkey_cls, val=val, expect_clean_val=expect_clean_val, expect_val=expect_val))
|
||||
|
||||
if key_len == 16:
|
||||
# test huge integer input.
|
||||
# needs to start with nonzero.. stupid
|
||||
val = 11020304050607080910111213141516
|
||||
expect_clean_val = (b'\x11\x02\x03\x04\x05\x06\x07\x08\x09\x10\x11\x12\x13\x14\x15\x16')
|
||||
expect_val = '11020304050607080910111213141516'
|
||||
param_tests.append(Paramtest(param_cls=sdkey_cls, val=val, expect_clean_val=expect_clean_val, expect_val=expect_val))
|
||||
|
||||
outputs = []
|
||||
|
||||
for upp_fname in upp_fnames:
|
||||
test_idx = -1
|
||||
try:
|
||||
|
||||
der = resources.read_binary(smdpp_data.upp, upp_fname)
|
||||
|
||||
for t in param_tests:
|
||||
test_idx += 1
|
||||
testlog = []
|
||||
testlog.append(f'{upp_fname} {t.param_cls.__name__}(val={valtypestr(t.val)})')
|
||||
|
||||
param = None
|
||||
try:
|
||||
param = t.param_cls()
|
||||
param.input_value = t.val
|
||||
param.validate()
|
||||
except ValueError as e:
|
||||
raise ValueError(f'{" ".join(testlog)}: {e}') from e
|
||||
|
||||
clean_val = param.value
|
||||
testlog.append(f'clean_val={valtypestr(clean_val)}')
|
||||
if t.expect_clean_val is not None and t.expect_clean_val != clean_val:
|
||||
raise ValueError(f'{" ".join(testlog)}: expected'
|
||||
f' expect_clean_val={valtypestr(t.expect_clean_val)}')
|
||||
|
||||
# on my laptop, deepcopy is about 30% slower than decoding the DER from scratch:
|
||||
# pes = copy.deepcopy(orig_pes)
|
||||
pes = ProfileElementSequence.from_der(der)
|
||||
|
||||
found = list((t.param_cls.get_value_from_pes(pes) or {}).values())
|
||||
testlog.append(f"previous value: {found}")
|
||||
|
||||
if t.iff_present and not found:
|
||||
testlog.append("skipping, param not in template.")
|
||||
output = "\nskip: " + "\n ".join(testlog)
|
||||
outputs.append(output)
|
||||
print(output)
|
||||
continue
|
||||
|
||||
try:
|
||||
param.apply(pes)
|
||||
except ValueError as e:
|
||||
raise ValueError(f'{" ".join(testlog)} apply_val(clean_val): {e}') from e
|
||||
|
||||
changed_der = pes.to_der()
|
||||
|
||||
pes2 = ProfileElementSequence.from_der(changed_der)
|
||||
|
||||
read_back_val = t.param_cls.get_value_from_pes(pes2)
|
||||
|
||||
# compose log string to show the precise type of dict values
|
||||
if isinstance(read_back_val, dict):
|
||||
types = set()
|
||||
for v in read_back_val.values():
|
||||
types.add(f'{type(v).__name__}')
|
||||
|
||||
read_back_val_type = '{' + ', '.join(types) + '}'
|
||||
else:
|
||||
read_back_val_type = f'{type(read_back_val).__name__}'
|
||||
|
||||
testlog.append(f'read_back_val={valtypestr(read_back_val)}')
|
||||
|
||||
if isinstance(read_back_val, dict) and not t.param_cls.get_name() in read_back_val.keys():
|
||||
raise ValueError(f'{" ".join(testlog)}: expected to find name {t.param_cls.get_name()!r} in read_back_val')
|
||||
|
||||
expect_val = t.expect_val
|
||||
if not isinstance(expect_val, dict):
|
||||
expect_val = { t.param_cls.get_name(): expect_val }
|
||||
if read_back_val != expect_val:
|
||||
raise ValueError(f'{" ".join(testlog)}: expected {expect_val=!r}:{type(t.expect_val).__name__}')
|
||||
|
||||
output = "\nok: " + "\n ".join(testlog)
|
||||
outputs.append(output)
|
||||
print(output)
|
||||
|
||||
except Exception as e:
|
||||
raise RuntimeError(f'Error while testing UPP {upp_fname} {test_idx=}: {e}') from e
|
||||
|
||||
output = '\n'.join(outputs) + '\n'
|
||||
xo_name = 'test_configurable_parameters'
|
||||
if update_expected_output:
|
||||
with resources.path(xo, xo_name) as xo_path:
|
||||
with open(xo_path, 'w', encoding='utf-8') as f:
|
||||
f.write(output)
|
||||
else:
|
||||
xo_str = resources.read_text(xo, xo_name)
|
||||
if xo_str != output:
|
||||
at = 0
|
||||
while at < len(output):
|
||||
if output[at] == xo_str[at]:
|
||||
at += 1
|
||||
continue
|
||||
break
|
||||
|
||||
raise RuntimeError(f'output differs from expected output at position {at}: "{output[at:at+20]}" != "{xo_str[at:at+20]}"')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if '-u' in sys.argv:
|
||||
update_expected_output = True
|
||||
sys.argv.remove('-u')
|
||||
unittest.main()
|
||||
@@ -21,7 +21,7 @@ import copy
|
||||
from osmocom.utils import h2b, b2h
|
||||
|
||||
from pySim.esim.saip import *
|
||||
from pySim.esim.saip import personalization
|
||||
from pySim.esim.saip.personalization import *
|
||||
from pprint import pprint as pp
|
||||
|
||||
|
||||
@@ -55,56 +55,14 @@ class SaipTest(unittest.TestCase):
|
||||
def test_personalization(self):
|
||||
"""Test some of the personalization operations."""
|
||||
pes = copy.deepcopy(self.pes)
|
||||
params = [personalization.Puk1('01234567'),
|
||||
personalization.Puk2(98765432),
|
||||
personalization.Pin1('1111'),
|
||||
personalization.Pin2(2222),
|
||||
personalization.Adm1('11111111'),
|
||||
personalization.K(h2b('000102030405060708090a0b0c0d0e0f')),
|
||||
personalization.Opc(h2b('101112131415161718191a1b1c1d1e1f'))]
|
||||
params = [Puk1('01234567'), Puk2(98765432), Pin1('1111'), Pin2(2222), Adm1('11111111'),
|
||||
K(h2b('000102030405060708090a0b0c0d0e0f')), Opc(h2b('101112131415161718191a1b1c1d1e1f'))]
|
||||
for p in params:
|
||||
p.validate()
|
||||
p.apply(pes)
|
||||
# TODO: we don't actually test the results here, but we just verify there is no exception
|
||||
pes.to_der()
|
||||
|
||||
def test_personalization2(self):
|
||||
"""Test some of the personalization operations."""
|
||||
cls = personalization.SdKeyScp80Kvn01DesEnc
|
||||
pes = ProfileElementSequence.from_der(self.per_input)
|
||||
prev_val = tuple(cls.get_values_from_pes(pes))
|
||||
print(f'{prev_val=}')
|
||||
self.assertTrue(prev_val)
|
||||
|
||||
set_val = '42342342342342342342342342342342'
|
||||
param = cls(set_val)
|
||||
param.validate()
|
||||
param.apply(pes)
|
||||
|
||||
get_val1 = tuple(cls.get_values_from_pes(pes))
|
||||
print(f'{get_val1=} {set_val=}')
|
||||
self.assertEqual(get_val1, ({cls.name: set_val},))
|
||||
|
||||
get_val1b = tuple(cls.get_values_from_pes(pes))
|
||||
print(f'{get_val1b=} {set_val=}')
|
||||
self.assertEqual(get_val1b, ({cls.name: set_val},))
|
||||
|
||||
der = pes.to_der()
|
||||
|
||||
get_val1c = tuple(cls.get_values_from_pes(pes))
|
||||
print(f'{get_val1c=} {set_val=}')
|
||||
self.assertEqual(get_val1c, ({cls.name: set_val},))
|
||||
|
||||
# assertTrue to not dump the entire der.
|
||||
# Expecting the modified DER to be different. If this assertion fails, then no change has happened in the output
|
||||
# DER and the ConfigurableParameter subclass is buggy.
|
||||
self.assertTrue(der != self.per_input)
|
||||
|
||||
pes2 = ProfileElementSequence.from_der(der)
|
||||
get_val2 = tuple(cls.get_values_from_pes(pes2))
|
||||
print(f'{get_val2=} {set_val=}')
|
||||
self.assertEqual(get_val2, ({cls.name: set_val},))
|
||||
|
||||
def test_constructor_encode(self):
|
||||
"""Test that DER-encoding of PE created by "empty" constructor works without raising exception."""
|
||||
for cls in [ProfileElementMF, ProfileElementPuk, ProfileElementPin, ProfileElementTelecom,
|
||||
|
||||
@@ -1,216 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# (C) 2025 by sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
|
||||
#
|
||||
# Author: Neels Hofmeyr
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
import math
|
||||
from importlib import resources
|
||||
import unittest
|
||||
from pySim.esim.saip import param_source
|
||||
|
||||
import xo
|
||||
update_expected_output = False
|
||||
|
||||
class D:
|
||||
mandatory = set()
|
||||
optional = set()
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
if (set(kwargs.keys()) - set(self.optional)) != set(self.mandatory):
|
||||
raise RuntimeError(f'{self.__class__.__name__}.__init__():'
|
||||
f' {set(kwargs.keys())=!r} - {self.optional=!r} != {self.mandatory=!r}')
|
||||
for k, v in kwargs.items():
|
||||
setattr(self, k, v)
|
||||
for k in self.optional:
|
||||
if not hasattr(self, k):
|
||||
setattr(self, k, None)
|
||||
|
||||
decimals = '0123456789'
|
||||
hexadecimals = '0123456789abcdefABCDEF'
|
||||
|
||||
class FakeRandom:
|
||||
vals = b'\xab\xcfm\xf0\x98J_\xcf\x96\x87fp5l\xe7f\xd1\xd6\x97\xc1\xf9]\x8c\x86+\xdb\t^ke\xc1r'
|
||||
i = 0
|
||||
|
||||
@classmethod
|
||||
def next(cls):
|
||||
cls.i = (cls.i + 1) % len(cls.vals)
|
||||
return cls.vals[cls.i]
|
||||
|
||||
@staticmethod
|
||||
def randint(a, b):
|
||||
d = b - a
|
||||
n_bytes = math.ceil(math.log(d, 2))
|
||||
r = int.from_bytes( bytes(FakeRandom.next() for i in range(n_bytes)) )
|
||||
return a + (r % (b - a))
|
||||
|
||||
@staticmethod
|
||||
def randbytes(n):
|
||||
return bytes(FakeRandom.next() for i in range(n))
|
||||
|
||||
|
||||
class ParamSourceTest(unittest.TestCase):
|
||||
|
||||
def test_param_source(self):
|
||||
|
||||
class ParamSourceTest(D):
|
||||
mandatory = (
|
||||
'param_source',
|
||||
'n',
|
||||
'expect',
|
||||
)
|
||||
optional = (
|
||||
'expect_arg',
|
||||
'csv_rows',
|
||||
)
|
||||
|
||||
def expect_const(t, vals):
|
||||
return tuple(t.expect_arg) == tuple(vals)
|
||||
|
||||
def expect_random(t, vals):
|
||||
chars = t.expect_arg.get('digits')
|
||||
repetitions = (t.n - len(set(vals)))
|
||||
if repetitions:
|
||||
raise RuntimeError(f'expect_random: there are {repetitions} repetitions in the returned values: {vals}')
|
||||
for val_i in range(len(vals)):
|
||||
v = vals[val_i]
|
||||
val_minlen = t.expect_arg.get('val_minlen')
|
||||
val_maxlen = t.expect_arg.get('val_maxlen')
|
||||
if len(v) < val_minlen or len(v) > val_maxlen:
|
||||
raise RuntimeError(f'expect_random: invalid length {len(v)} for value [{val_i}]: {v!r}, expecting'
|
||||
f' {val_minlen}..{val_maxlen}')
|
||||
|
||||
if chars is not None and not all(c in chars for c in v):
|
||||
raise RuntimeError(f'expect_random: invalid char in value [{val_i}]: {v!r}')
|
||||
return True
|
||||
|
||||
param_source_tests = [
|
||||
ParamSourceTest(param_source=param_source.ConstantSource.from_str('123'),
|
||||
n=3,
|
||||
expect=expect_const,
|
||||
expect_arg=('123', '123', '123')
|
||||
),
|
||||
ParamSourceTest(param_source=param_source.RandomDigitSource.from_str('12345'),
|
||||
n=3,
|
||||
expect=expect_random,
|
||||
expect_arg={'digits': decimals,
|
||||
'val_minlen': 5,
|
||||
'val_maxlen': 5,
|
||||
},
|
||||
),
|
||||
ParamSourceTest(param_source=param_source.RandomDigitSource.from_str('1..999'),
|
||||
n=10,
|
||||
expect=expect_random,
|
||||
expect_arg={'digits': decimals,
|
||||
'val_minlen': 1,
|
||||
'val_maxlen': 3,
|
||||
},
|
||||
),
|
||||
ParamSourceTest(param_source=param_source.RandomDigitSource.from_str('001..999'),
|
||||
n=10,
|
||||
expect=expect_random,
|
||||
expect_arg={'digits': decimals,
|
||||
'val_minlen': 3,
|
||||
'val_maxlen': 3,
|
||||
},
|
||||
),
|
||||
ParamSourceTest(param_source=param_source.RandomHexDigitSource.from_str('12345678'),
|
||||
n=3,
|
||||
expect=expect_random,
|
||||
expect_arg={'digits': hexadecimals,
|
||||
'val_minlen': 8,
|
||||
'val_maxlen': 8,
|
||||
},
|
||||
),
|
||||
ParamSourceTest(param_source=param_source.RandomHexDigitSource.from_str('0*8'),
|
||||
n=3,
|
||||
expect=expect_random,
|
||||
expect_arg={'digits': hexadecimals,
|
||||
'val_minlen': 8,
|
||||
'val_maxlen': 8,
|
||||
},
|
||||
),
|
||||
ParamSourceTest(param_source=param_source.RandomHexDigitSource.from_str('00*4'),
|
||||
n=3,
|
||||
expect=expect_random,
|
||||
expect_arg={'digits': hexadecimals,
|
||||
'val_minlen': 8,
|
||||
'val_maxlen': 8,
|
||||
},
|
||||
),
|
||||
ParamSourceTest(param_source=param_source.IncDigitSource.from_str('10001'),
|
||||
n=3,
|
||||
expect=expect_const,
|
||||
expect_arg=('10001', '10002', '10003')
|
||||
),
|
||||
ParamSourceTest(param_source=param_source.CsvSource('column_name'),
|
||||
n=3,
|
||||
expect=expect_const,
|
||||
expect_arg=('first val', 'second val', 'third val'),
|
||||
csv_rows=(
|
||||
{'column_name': 'first val',},
|
||||
{'column_name': 'second val',},
|
||||
{'column_name': 'third val',},
|
||||
)
|
||||
),
|
||||
]
|
||||
|
||||
outputs = []
|
||||
|
||||
for t in param_source_tests:
|
||||
try:
|
||||
if hasattr(t.param_source, 'random_impl'):
|
||||
t.param_source.random_impl = FakeRandom
|
||||
|
||||
vals = []
|
||||
for i in range(t.n):
|
||||
csv_row = None
|
||||
if t.csv_rows is not None:
|
||||
csv_row = t.csv_rows[i]
|
||||
vals.append( t.param_source.get_next(csv_row=csv_row) )
|
||||
if not t.expect(t, vals):
|
||||
raise RuntimeError(f'invalid values returned: returned {vals}')
|
||||
output = f'ok: {t.param_source.__class__.__name__} {vals=!r}'
|
||||
outputs.append(output)
|
||||
print(output)
|
||||
except RuntimeError as e:
|
||||
raise RuntimeError(f'{t.param_source.__class__.__name__} {t.n=} {t.expect.__name__}({t.expect_arg!r}): {e}') from e
|
||||
|
||||
output = '\n'.join(outputs) + '\n'
|
||||
xo_name = 'test_param_src'
|
||||
if update_expected_output:
|
||||
with resources.path(xo, xo_name) as xo_path:
|
||||
with open(xo_path, 'w', encoding='utf-8') as f:
|
||||
f.write(output)
|
||||
else:
|
||||
xo_str = resources.read_text(xo, xo_name)
|
||||
if xo_str != output:
|
||||
at = 0
|
||||
while at < len(output):
|
||||
if output[at] == xo_str[at]:
|
||||
at += 1
|
||||
continue
|
||||
break
|
||||
|
||||
raise RuntimeError(f'output differs from expected output at position {at}: {xo_str[at:at+128]!r}')
|
||||
|
||||
if __name__ == "__main__":
|
||||
if '-u' in sys.argv:
|
||||
update_expected_output = True
|
||||
sys.argv.remove('-u')
|
||||
unittest.main()
|
||||
File diff suppressed because it is too large (Load Diff)

@@ -1,9 +0,0 @@
ok: ConstantSource vals=['123', '123', '123']
ok: RandomDigitSource vals=['13987', '49298', '55670']
ok: RandomDigitSource vals=['650', '580', '49', '885', '497', '195', '320', '137', '245', '663']
ok: RandomDigitSource vals=['638', '025', '232', '779', '826', '972', '650', '580', '049', '885']
ok: RandomHexDigitSource vals=['6b65c172', 'abcf6df0', '984a5fcf']
ok: RandomHexDigitSource vals=['96876670', '356ce766', 'd1d697c1']
ok: RandomHexDigitSource vals=['f95d8c86', '2bdb095e', '6b65c172']
ok: IncDigitSource vals=['10001', '10002', '10003']
ok: CsvSource vals=['first val', 'second val', 'third val']

Reference in New Issue — Block a user