mirror of
https://gitea.osmocom.org/sim-card/pysim.git
synced 2026-04-11 08:52:38 +03:00
Compare commits
38 Commits
neels/wip
...
osmith/sai
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9b5a1f3387 | ||
|
|
1cca1e5c1e | ||
|
|
ed648f7ed1 | ||
|
|
e481eae39d | ||
|
|
c7b79a368d | ||
|
|
6b4399f83b | ||
|
|
ebccf9ae7e | ||
|
|
9055ae841c | ||
|
|
0546de12c9 | ||
|
|
d48ec5954b | ||
|
|
30f9088130 | ||
|
|
d4a3daec41 | ||
|
|
0e96683db6 | ||
|
|
8375b1c82b | ||
|
|
77ef52ec74 | ||
|
|
5eee7ed410 | ||
|
|
5b483f49fa | ||
|
|
4818737f07 | ||
|
|
0f494c29b3 | ||
|
|
5ef1805135 | ||
|
|
377c6c9827 | ||
|
|
a1bf1c120d | ||
|
|
b5b5ceb74b | ||
|
|
fa46ba9ffa | ||
|
|
6b7c99d0ae | ||
|
|
716c95a12a | ||
|
|
de4e7611a1 | ||
|
|
b55d25547a | ||
|
|
9b85def1f0 | ||
|
|
2a0fc69464 | ||
|
|
ba8f4bda15 | ||
|
|
22b8d0ef0c | ||
|
|
5e0ef1c28a | ||
|
|
42523874ac | ||
|
|
bbf8c00e5d | ||
|
|
1e27754bb4 | ||
|
|
9c6cdc7650 | ||
|
|
828bdffbb5 |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -3,7 +3,6 @@
|
|||||||
|
|
||||||
/docs/_*
|
/docs/_*
|
||||||
/docs/generated
|
/docs/generated
|
||||||
/docs/filesystem.rst
|
|
||||||
/.cache
|
/.cache
|
||||||
/.local
|
/.local
|
||||||
/build
|
/build
|
||||||
|
|||||||
@@ -285,7 +285,10 @@ if __name__ == '__main__':
|
|||||||
option_parser.add_argument("--admin", action='store_true', help="perform action as admin", default=False)
|
option_parser.add_argument("--admin", action='store_true', help="perform action as admin", default=False)
|
||||||
opts = option_parser.parse_args()
|
opts = option_parser.parse_args()
|
||||||
|
|
||||||
PySimLogger.setup(print, {logging.WARN: "\033[33m"}, opts.verbose)
|
PySimLogger.setup(print, {logging.WARN: "\033[33m"})
|
||||||
|
if (opts.verbose):
|
||||||
|
PySimLogger.set_verbose(True)
|
||||||
|
PySimLogger.set_level(logging.DEBUG)
|
||||||
|
|
||||||
# Open CSV file
|
# Open CSV file
|
||||||
cr = open_csv(opts)
|
cr = open_csv(opts)
|
||||||
|
|||||||
@@ -30,48 +30,6 @@ from pathlib import Path
|
|||||||
|
|
||||||
logger = logging.getLogger(Path(__file__).stem)
|
logger = logging.getLogger(Path(__file__).stem)
|
||||||
|
|
||||||
option_parser = argparse.ArgumentParser(description='Tool to send OTA SMS RFM/RAM messages via SMPP',
|
|
||||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
|
||||||
option_parser.add_argument("--host", help="Host/IP of the SMPP server", default="localhost")
|
|
||||||
option_parser.add_argument("--port", help="TCP port of the SMPP server", default=2775, type=int)
|
|
||||||
option_parser.add_argument("--system-id", help="System ID to use to bind to the SMPP server", default="test")
|
|
||||||
option_parser.add_argument("--password", help="Password to use to bind to the SMPP server", default="test")
|
|
||||||
option_parser.add_argument("--verbose", help="Enable verbose logging", action='store_true', default=False)
|
|
||||||
algo_crypt_choices = []
|
|
||||||
algo_crypt_classes = OtaAlgoCrypt.__subclasses__()
|
|
||||||
for cls in algo_crypt_classes:
|
|
||||||
algo_crypt_choices.append(cls.enum_name)
|
|
||||||
option_parser.add_argument("--algo-crypt", choices=algo_crypt_choices, default='triple_des_cbc2',
|
|
||||||
help="OTA crypt algorithm")
|
|
||||||
algo_auth_choices = []
|
|
||||||
algo_auth_classes = OtaAlgoAuth.__subclasses__()
|
|
||||||
for cls in algo_auth_classes:
|
|
||||||
algo_auth_choices.append(cls.enum_name)
|
|
||||||
option_parser.add_argument("--algo-auth", choices=algo_auth_choices, default='triple_des_cbc2',
|
|
||||||
help="OTA auth algorithm")
|
|
||||||
option_parser.add_argument('--kic', required=True, type=is_hexstr, help='OTA key (KIC)')
|
|
||||||
option_parser.add_argument('--kic-idx', default=1, type=int, help='OTA key index (KIC)')
|
|
||||||
option_parser.add_argument('--kid', required=True, type=is_hexstr, help='OTA key (KID)')
|
|
||||||
option_parser.add_argument('--kid-idx', default=1, type=int, help='OTA key index (KID)')
|
|
||||||
option_parser.add_argument('--cntr', default=0, type=int, help='replay protection counter')
|
|
||||||
option_parser.add_argument('--tar', required=True, type=is_hexstr, help='Toolkit Application Reference')
|
|
||||||
option_parser.add_argument("--cntr-req", choices=CNTR_REQ.decmapping.values(), default='no_counter',
|
|
||||||
help="Counter requirement")
|
|
||||||
option_parser.add_argument('--no-ciphering', action='store_true', default=False, help='Disable ciphering')
|
|
||||||
option_parser.add_argument("--rc-cc-ds", choices=RC_CC_DS.decmapping.values(), default='cc',
|
|
||||||
help="message check (rc=redundency check, cc=crypt. checksum, ds=digital signature)")
|
|
||||||
option_parser.add_argument('--por-in-submit', action='store_true', default=False,
|
|
||||||
help='require PoR to be sent via SMS-SUBMIT')
|
|
||||||
option_parser.add_argument('--por-no-ciphering', action='store_true', default=False, help='Disable ciphering (PoR)')
|
|
||||||
option_parser.add_argument("--por-rc-cc-ds", choices=RC_CC_DS.decmapping.values(), default='cc',
|
|
||||||
help="PoR check (rc=redundency check, cc=crypt. checksum, ds=digital signature)")
|
|
||||||
option_parser.add_argument("--por-req", choices=POR_REQ.decmapping.values(), default='por_required',
|
|
||||||
help="Proof of Receipt requirements")
|
|
||||||
option_parser.add_argument('--src-addr', default='12', type=str, help='SMS source address (MSISDN)')
|
|
||||||
option_parser.add_argument('--dest-addr', default='23', type=str, help='SMS destination address (MSISDN)')
|
|
||||||
option_parser.add_argument('--timeout', default=10, type=int, help='Maximum response waiting time')
|
|
||||||
option_parser.add_argument('-a', '--apdu', action='append', required=True, type=is_hexstr, help='C-APDU to send')
|
|
||||||
|
|
||||||
class SmppHandler:
|
class SmppHandler:
|
||||||
client = None
|
client = None
|
||||||
|
|
||||||
@@ -183,7 +141,7 @@ class SmppHandler:
|
|||||||
tuple containing the last response data and the last status word as byte strings
|
tuple containing the last response data and the last status word as byte strings
|
||||||
"""
|
"""
|
||||||
|
|
||||||
logger.info("C-APDU sending: %s...", b2h(apdu))
|
logger.info("C-APDU sending: %s..." % b2h(apdu))
|
||||||
|
|
||||||
# translate to Secured OTA RFM
|
# translate to Secured OTA RFM
|
||||||
secured = self.ota_dialect.encode_cmd(self.ota_keyset, self.tar, self.spi, apdu=apdu)
|
secured = self.ota_dialect.encode_cmd(self.ota_keyset, self.tar, self.spi, apdu=apdu)
|
||||||
@@ -209,28 +167,65 @@ class SmppHandler:
|
|||||||
return h2b(resp), h2b(sw)
|
return h2b(resp), h2b(sw)
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
option_parser = argparse.ArgumentParser(description='CSV importer for pySim-shell\'s PostgreSQL Card Key Provider',
|
||||||
|
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||||
|
option_parser.add_argument("--host", help="Host/IP of the SMPP server", default="localhost")
|
||||||
|
option_parser.add_argument("--port", help="TCP port of the SMPP server", default=2775, type=int)
|
||||||
|
option_parser.add_argument("--system-id", help="System ID to use to bind to the SMPP server", default="test")
|
||||||
|
option_parser.add_argument("--password", help="Password to use to bind to the SMPP server", default="test")
|
||||||
|
option_parser.add_argument("--verbose", help="Enable verbose logging", action='store_true', default=False)
|
||||||
|
algo_crypt_choices = []
|
||||||
|
algo_crypt_classes = OtaAlgoCrypt.__subclasses__()
|
||||||
|
for cls in algo_crypt_classes:
|
||||||
|
algo_crypt_choices.append(cls.enum_name)
|
||||||
|
option_parser.add_argument("--algo-crypt", choices=algo_crypt_choices, default='triple_des_cbc2',
|
||||||
|
help="OTA crypt algorithm")
|
||||||
|
algo_auth_choices = []
|
||||||
|
algo_auth_classes = OtaAlgoAuth.__subclasses__()
|
||||||
|
for cls in algo_auth_classes:
|
||||||
|
algo_auth_choices.append(cls.enum_name)
|
||||||
|
option_parser.add_argument("--algo-auth", choices=algo_auth_choices, default='triple_des_cbc2',
|
||||||
|
help="OTA auth algorithm")
|
||||||
|
option_parser.add_argument('--kic', required=True, type=is_hexstr, help='OTA key (KIC)')
|
||||||
|
option_parser.add_argument('--kic_idx', default=1, type=int, help='OTA key index (KIC)')
|
||||||
|
option_parser.add_argument('--kid', required=True, type=is_hexstr, help='OTA key (KID)')
|
||||||
|
option_parser.add_argument('--kid_idx', default=1, type=int, help='OTA key index (KID)')
|
||||||
|
option_parser.add_argument('--cntr', default=0, type=int, help='replay protection counter')
|
||||||
|
option_parser.add_argument('--tar', required=True, type=is_hexstr, help='Toolkit Application Reference')
|
||||||
|
option_parser.add_argument("--cntr_req", choices=CNTR_REQ.decmapping.values(), default='no_counter',
|
||||||
|
help="Counter requirement")
|
||||||
|
option_parser.add_argument('--ciphering', default=True, type=bool, help='Enable ciphering')
|
||||||
|
option_parser.add_argument("--rc-cc-ds", choices=RC_CC_DS.decmapping.values(), default='cc',
|
||||||
|
help="message check (rc=redundency check, cc=crypt. checksum, ds=digital signature)")
|
||||||
|
option_parser.add_argument('--por-in-submit', default=False, type=bool,
|
||||||
|
help='require PoR to be sent via SMS-SUBMIT')
|
||||||
|
option_parser.add_argument('--por-shall-be-ciphered', default=True, type=bool, help='require encrypted PoR')
|
||||||
|
option_parser.add_argument("--por-rc-cc-ds", choices=RC_CC_DS.decmapping.values(), default='cc',
|
||||||
|
help="PoR check (rc=redundency check, cc=crypt. checksum, ds=digital signature)")
|
||||||
|
option_parser.add_argument("--por_req", choices=POR_REQ.decmapping.values(), default='por_required',
|
||||||
|
help="Proof of Receipt requirements")
|
||||||
|
option_parser.add_argument('--src-addr', default='12', type=str, help='TODO')
|
||||||
|
option_parser.add_argument('--dest-addr', default='23', type=str, help='TODO')
|
||||||
|
option_parser.add_argument('--timeout', default=10, type=int, help='TODO')
|
||||||
|
option_parser.add_argument('-a', '--apdu', action='append', required=True, type=is_hexstr, help='C-APDU to send')
|
||||||
opts = option_parser.parse_args()
|
opts = option_parser.parse_args()
|
||||||
|
|
||||||
logging.basicConfig(level=logging.DEBUG if opts.verbose else logging.INFO,
|
logging.basicConfig(level=logging.DEBUG if opts.verbose else logging.INFO,
|
||||||
format='%(asctime)s %(levelname)s %(message)s',
|
format='%(asctime)s %(levelname)s %(message)s',
|
||||||
datefmt='%Y-%m-%d %H:%M:%S')
|
datefmt='%Y-%m-%d %H:%M:%S')
|
||||||
|
|
||||||
if opts.kic_idx != opts.kid_idx:
|
|
||||||
logger.warning("KIC index (%s) and KID index (%s) are different (security violation, card should reject message)",
|
|
||||||
opts.kic_idx, opts.kid_idx)
|
|
||||||
|
|
||||||
ota_keyset = OtaKeyset(algo_crypt=opts.algo_crypt,
|
ota_keyset = OtaKeyset(algo_crypt=opts.algo_crypt,
|
||||||
kic_idx=opts.kic_idx,
|
kic_idx=opts.kic_idx,
|
||||||
kic=h2b(opts.kic),
|
kic=h2b(opts.kic),
|
||||||
algo_auth=opts.algo_auth,
|
algo_auth=opts.algo_auth,
|
||||||
kid_idx=opts.kid_idx,
|
kid_idx=opts.kic_idx,
|
||||||
kid=h2b(opts.kid),
|
kid=h2b(opts.kid),
|
||||||
cntr=opts.cntr)
|
cntr=opts.cntr)
|
||||||
spi = {'counter' : opts.cntr_req,
|
spi = {'counter' : opts.cntr_req,
|
||||||
'ciphering' : not opts.no_ciphering,
|
'ciphering' : opts.ciphering,
|
||||||
'rc_cc_ds': opts.rc_cc_ds,
|
'rc_cc_ds': opts.rc_cc_ds,
|
||||||
'por_in_submit': opts.por_in_submit,
|
'por_in_submit':opts.por_in_submit,
|
||||||
'por_shall_be_ciphered': not opts.por_no_ciphering,
|
'por_shall_be_ciphered':opts.por_shall_be_ciphered,
|
||||||
'por_rc_cc_ds': opts.por_rc_cc_ds,
|
'por_rc_cc_ds': opts.por_rc_cc_ds,
|
||||||
'por': opts.por_req}
|
'por': opts.por_req}
|
||||||
apdu = h2b("".join(opts.apdu))
|
apdu = h2b("".join(opts.apdu))
|
||||||
|
|||||||
@@ -305,16 +305,16 @@ the requested data.
|
|||||||
|
|
||||||
|
|
||||||
ADM PIN
|
ADM PIN
|
||||||
^^^^^^^
|
~~~~~~~
|
||||||
|
|
||||||
The `verify_adm` command will attempt to look up the `ADM1` column
|
The `verify_adm` command will attempt to look up the `ADM1` column
|
||||||
indexed by the ICCID of the SIM/UICC.
|
indexed by the ICCID of the SIM/UICC.
|
||||||
|
|
||||||
|
|
||||||
SCP02 / SCP03
|
SCP02 / SCP03
|
||||||
^^^^^^^^^^^^^
|
~~~~~~~~~~~~~
|
||||||
|
|
||||||
SCP02 and SCP03 each use key triplets consisting of ENC, MAC and DEK
|
SCP02 and SCP03 each use key triplets consisting if ENC, MAC and DEK
|
||||||
keys. For more details, see the applicable GlobalPlatform
|
keys. For more details, see the applicable GlobalPlatform
|
||||||
specifications.
|
specifications.
|
||||||
|
|
||||||
|
|||||||
26
docs/conf.py
26
docs/conf.py
@@ -13,7 +13,6 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
sys.path.insert(0, os.path.abspath('..'))
|
sys.path.insert(0, os.path.abspath('..'))
|
||||||
sys.path.insert(0, os.path.abspath('.')) # for local extensions (pysim_fs_sphinx, ...)
|
|
||||||
|
|
||||||
|
|
||||||
# -- Project information -----------------------------------------------------
|
# -- Project information -----------------------------------------------------
|
||||||
@@ -40,8 +39,7 @@ extensions = [
|
|||||||
"sphinx.ext.autodoc",
|
"sphinx.ext.autodoc",
|
||||||
"sphinxarg.ext",
|
"sphinxarg.ext",
|
||||||
"sphinx.ext.autosectionlabel",
|
"sphinx.ext.autosectionlabel",
|
||||||
"sphinx.ext.napoleon",
|
"sphinx.ext.napoleon"
|
||||||
"pysim_fs_sphinx",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
@@ -66,25 +64,3 @@ html_theme = 'alabaster'
|
|||||||
html_static_path = ['_static']
|
html_static_path = ['_static']
|
||||||
|
|
||||||
autoclass_content = 'both'
|
autoclass_content = 'both'
|
||||||
|
|
||||||
# Mock optional server-side deps of es2p and http_json_api/es9p,
|
|
||||||
# so that autodoc can import and document those modules.
|
|
||||||
autodoc_mock_imports = ['klein', 'twisted']
|
|
||||||
|
|
||||||
# Workaround for duplicate label warnings:
|
|
||||||
# https://github.com/sphinx-doc/sphinx-argparse/issues/14
|
|
||||||
#
|
|
||||||
# sphinxarg.ext generates generic sub-headings ("Named arguments",
|
|
||||||
# "Positional arguments", "Sub-commands", "General options", ...) for every
|
|
||||||
# argparse command/tool. These repeat across many files and trigger tons
|
|
||||||
# of autosectionlabel duplicate-label warnings - suppress them.
|
|
||||||
autosectionlabel_maxdepth = 3
|
|
||||||
suppress_warnings = [
|
|
||||||
'autosectionlabel.filesystem',
|
|
||||||
'autosectionlabel.saip-tool',
|
|
||||||
'autosectionlabel.shell',
|
|
||||||
'autosectionlabel.smpp2sim',
|
|
||||||
'autosectionlabel.smpp-ota-tool',
|
|
||||||
'autosectionlabel.suci-keytool',
|
|
||||||
'autosectionlabel.trace',
|
|
||||||
]
|
|
||||||
|
|||||||
@@ -39,7 +39,6 @@ pySim consists of several parts:
|
|||||||
:caption: Contents:
|
:caption: Contents:
|
||||||
|
|
||||||
shell
|
shell
|
||||||
filesystem
|
|
||||||
trace
|
trace
|
||||||
legacy
|
legacy
|
||||||
smpp2sim
|
smpp2sim
|
||||||
@@ -49,7 +48,6 @@ pySim consists of several parts:
|
|||||||
sim-rest
|
sim-rest
|
||||||
suci-keytool
|
suci-keytool
|
||||||
saip-tool
|
saip-tool
|
||||||
smpp-ota-tool
|
|
||||||
|
|
||||||
|
|
||||||
Indices and tables
|
Indices and tables
|
||||||
|
|||||||
@@ -205,7 +205,7 @@ Specifically, pySim-read will dump the following:
|
|||||||
|
|
||||||
* DF.GSM
|
* DF.GSM
|
||||||
|
|
||||||
* EF.IMSI
|
* EF,IMSI
|
||||||
* EF.GID1
|
* EF.GID1
|
||||||
* EF.GID2
|
* EF.GID2
|
||||||
* EF.SMSP
|
* EF.SMSP
|
||||||
|
|||||||
@@ -1,836 +0,0 @@
|
|||||||
Guide: Managing GP Keys
|
|
||||||
=======================
|
|
||||||
|
|
||||||
Most of today's smartcards follow the GlobalPlatform Card Specification and the included Security Domain model.
|
|
||||||
UICCs and eUCCCs are no exception here.
|
|
||||||
|
|
||||||
The Security Domain acts as an on-card representative of a card authority or administrator. It is used to perform tasks
|
|
||||||
like the installation of applications or the provisioning and rotation of secure channel keys. It also acts as a secure
|
|
||||||
key storage and offers all kinds of cryptographic services to applications that are installed under a specific
|
|
||||||
Security Domain (see also GlobalPlatform Card Specification, section 7).
|
|
||||||
|
|
||||||
In this tutorial, we will show how to work with the key material (keysets) stored inside a Security Domain and how to
|
|
||||||
rotate (replace) existing keys. We will also show how to provision new keys.
|
|
||||||
|
|
||||||
.. warning:: Making changes to keysets requires extreme caution as misconfigured keysets may lock you out permanently.
|
|
||||||
It's also strongly recommended to maintain at least one backup keyset that you can use as fallback in case
|
|
||||||
the primary keyset becomes unusable for some reason.
|
|
||||||
|
|
||||||
|
|
||||||
Selecting a Security Domain
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
A typical smartcard, such as an UICC will have one primary Security Domain, called the Issuer Security Domain (ISD).
|
|
||||||
When working with those cards, the ISD will show up in the UICC filesystem tree as `ADF.ISD` and can be selected like
|
|
||||||
any other file.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (00:MF)> select ADF.ISD
|
|
||||||
{
|
|
||||||
"application_id": "a000000003000000",
|
|
||||||
"proprietary_data": {
|
|
||||||
"maximum_length_of_data_field_in_command_message": 255
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
When working with eUICCs, multiple Security Domains are involved. The model is fundamentally different from the classic
|
|
||||||
model with one primary Security Domain (ISD). In the case of eUICCs, an ISD-R (Issuer Security Domain - Root) and an
|
|
||||||
ISD-P (Issuer Security Domain - Profile) exist (see also: GSMA SGP.02, section 2.2.1).
|
|
||||||
|
|
||||||
The ISD-P is established by the ISD-R during the profile installation and serves as a secure container for an eSIM
|
|
||||||
profile. Within the ISD-P the eSIM profile establishes a dedicated Security Domain called `MNO-SD` (see also GSMA
|
|
||||||
SGP.02, section 2.2.4). This `MNO-SD` is comparable to the Issuer Security Domain (ISD) we find on UICCs. The AID of
|
|
||||||
`MNO-SD` is either the default AID for the Issuer Security Domain (see also GlobalPlatform, section H.1.3) or a
|
|
||||||
different value specified by the provider of the eSIM profile.
|
|
||||||
|
|
||||||
Since the AID of the `MNO-SD` is not a fixed value, it is not known by `pySim-shell`. This means there will be no
|
|
||||||
`ADF.ISD` file shown in the file system, but we can simply select the `ADF.ISD-R` first and then select the `MNO-SD`
|
|
||||||
using a raw APDU. In the following example we assume that the default AID (``a000000151000000``) is used The APDU
|
|
||||||
would look like this: ``00a4040408`` + ``a000000151000000`` + ``00``
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (00:MF)> select ADF.ISD-R
|
|
||||||
{
|
|
||||||
"application_id": "a0000005591010ffffffff8900000100",
|
|
||||||
"proprietary_data": {
|
|
||||||
"maximum_length_of_data_field_in_command_message": 255
|
|
||||||
},
|
|
||||||
"isdr_proprietary_application_template": {
|
|
||||||
"supported_version_number": "020300"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pySIM-shell (00:MF/ADF.ISD-R)> apdu 00a4040408a00000015100000000
|
|
||||||
SW: 9000, RESP: 6f108408a000000151000000a5049f6501ff
|
|
||||||
|
|
||||||
After that, the prompt will still show the `ADF.ISD-R`, but we are actually in `ADF.ISD` and the standard GlobalPlatform
|
|
||||||
operations like `establish_scpXX`, `get_data`, and `put_key` should work. By doing this, we simply have tricked
|
|
||||||
`pySim-shell` into making the GlobalPlatform related commands available for some other Security Domain we are not
|
|
||||||
interested in. With the raw APDU we then have swapped out the Security Domain under the hood. The same workaround can
|
|
||||||
be applied to any Security Domain, provided that the AID is known to the user.
|
|
||||||
|
|
||||||
|
|
||||||
Establishing a secure channel
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Before we can make changes to the keysets in the currently selected Security Domain we must first establish a secure
|
|
||||||
channel with that Security Domain. In the following examples we will use `SCP02` (see also GlobalPlatform Card
|
|
||||||
Specification, section E.1.1) and `SCP03` (see also GlobalPlatform Card Specification – Amendment D) to establish the
|
|
||||||
secure channel. `SCP02` is slightly older than `SCP03`. The main difference between the two is that `SCP02` uses 3DES
|
|
||||||
while `SCP03` is based on AES.
|
|
||||||
|
|
||||||
.. warning:: Secure channel protocols like `SCP02` and `SCP03` may manage an error counter to count failed login
|
|
||||||
attempts. This means attempting to establish a secure channel with a wrong keyset multiple times may lock
|
|
||||||
you out permanently. Double check the applied keyset before attempting to establish a secure channel.
|
|
||||||
|
|
||||||
.. warning:: The key values used in the following examples are random key values used for illustration purposes only.
|
|
||||||
Each UICC or eSIM profile is shipped with individual keys, which means that the keys used below will not
|
|
||||||
work with your UICC or eSIM profile. You must replace the key values with the values you have received
|
|
||||||
from your UICC vendor or eSIM profile provider.
|
|
||||||
|
|
||||||
|
|
||||||
Example: `SCP02`
|
|
||||||
----------------
|
|
||||||
|
|
||||||
In the following example, we assume that we want to establish a secure channel with the ISD of a `sysmoUSIM-SJA5` UICC.
|
|
||||||
Along with the card we have received the following keyset:
|
|
||||||
|
|
||||||
+---------+----------------------------------+
|
|
||||||
| Keyname | Keyvalue |
|
|
||||||
+=========+==================================+
|
|
||||||
| ENC/KIC | F09C43EE1A0391665CC9F05AF4E0BD10 |
|
|
||||||
+---------+----------------------------------+
|
|
||||||
| MAC/KID | 01981F4A20999F62AF99988007BAF6CA |
|
|
||||||
+---------+----------------------------------+
|
|
||||||
| DEK/KIK | 8F8AEE5CDCC5D361368BC45673D99195 |
|
|
||||||
+---------+----------------------------------+
|
|
||||||
|
|
||||||
This keyset is tied to the key version number KVN 122 and is configured as a DES keyset. We can use this keyset to
|
|
||||||
establish a secure channel using the SCP02 Secure Channel Protocol.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (00:MF/ADF.ISD)> establish_scp02 --key-enc F09C43EE1A0391665CC9F05AF4E0BD10 --key-mac 01981F4A20999F62AF99988007BAF6CA --key-dek 8F8AEE5CDCC5D361368BC45673D99195 --key-ver 112 --security-level 3
|
|
||||||
Successfully established a SCP02[03] secure channel
|
|
||||||
|
|
||||||
|
|
||||||
Example: `SCP03`
|
|
||||||
----------------
|
|
||||||
|
|
||||||
The establishment of a secure channel via SCP03 works just the same. In the following example we will establish a
|
|
||||||
secure channel to the `MNO-SD` of an eSIM profile. The SCP03 keyset we use is tied to KVN 48 and looks like this:
|
|
||||||
|
|
||||||
+---------+------------------------------------------------------------------+
|
|
||||||
| Keyname | Keyvalue |
|
|
||||||
+=========+==================================================================+
|
|
||||||
| ENC/KIC | 63af517c29ad6ac6fcadfe6ac8a3c8a041d8141c7eb845ef1cba6112a325e430 |
|
|
||||||
+---------+------------------------------------------------------------------+
|
|
||||||
| MAC/KID | 54b9ad6713ae922f54014ed762132e7b59bdcd2a2a6beba98fb9afe6b4df27e1 |
|
|
||||||
+---------+------------------------------------------------------------------+
|
|
||||||
| DEK/KIK | cbb933ba2389da93c86c112739cd96389139f16c6f80f7d16bf3593e407ca893 |
|
|
||||||
+---------+------------------------------------------------------------------+
|
|
||||||
|
|
||||||
We assume that the `MNO-SD` is already selected (see above). We may now establish the SCP03 secure channel:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (00:MF/ADF.ISD-R)> establish_scp03 --key-enc 63af517c29ad6ac6fcadfe6ac8a3c8a041d8141c7eb845ef1cba6112a325e430 --key-mac 54b9ad6713ae922f54014ed762132e7b59bdcd2a2a6beba98fb9afe6b4df27e1 --key-dek cbb933ba2389da93c86c112739cd96389139f16c6f80f7d16bf3593e407ca893 --key-ver 48 --security-level 3
|
|
||||||
Successfully established a SCP03[03] secure channel
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Understanding Keysets
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Before making any changes to keysets, it is recommended to check the status of the currently installed keysets. To do
|
|
||||||
so, we use the `get_data` command to retrieve the `key_information`. This command does not require the establishment of
|
|
||||||
a secure channel. We also cannot read back the key values themselves, but we get a summary of the installed keys
|
|
||||||
together with their KVN numbers, IDs, algorithm and key length values.
|
|
||||||
|
|
||||||
Example: `key_information` from a `sysmoISIM-SJA5`:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> get_data key_information
|
|
||||||
{
|
|
||||||
"key_information": [
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 112,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 112,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 3,
|
|
||||||
"key_version_number": 112,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 1,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 1,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 3,
|
|
||||||
"key_version_number": 1,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 2,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 2,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 3,
|
|
||||||
"key_version_number": 2,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 47,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 47,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 3,
|
|
||||||
"key_version_number": 47,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
Example: `key_information` from a `sysmoEUICC1-C2T`:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP03[03]:00:MF/ADF.ISD-R)> get_data key_information
|
|
||||||
{
|
|
||||||
"key_information": [
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 3,
|
|
||||||
"key_version_number": 50,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 32
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 50,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 32
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 50,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 32
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 64,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 64,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "tls_psk",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
The output from those two examples above may seem lengthy, but in order to move on and to provision own keys
|
|
||||||
successfully, it is important to understand each aspect of it.
|
|
||||||
|
|
||||||
Key Version Number (KVN)
|
|
||||||
------------------------
|
|
||||||
|
|
||||||
Each key is associated with a Key Version Number (KVN). Multiple keys that share the same KVN belong to the same
|
|
||||||
keyset. In the first example above we can see that four keysets with KVN numbers 112, 1, 2 and 47 are provisioned.
|
|
||||||
In the second example we see two keysets. One with KVN 50 and one with KVN 64.
|
|
||||||
|
|
||||||
The term "Key Version Number" is misleading as this number is not really a version number. It's actually a unique
|
|
||||||
identifier for a specific keyset that also defines with which Secure Channel Protocol a key can be used. This means
|
|
||||||
that the KVN is not just an arbitrary number. The following (incomplete) table gives a hint which KVN numbers may be
|
|
||||||
used with which Secure Channel Protocol.
|
|
||||||
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| KVN range | Secure Channel Protocol |
|
|
||||||
+===========+=======================================================+
|
|
||||||
| 1-15 | reserved for `SCP80` (OTA SMS) |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 17 | reserved for DAP specified in ETSI TS 102 226 |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 32-47 | reserved for `SCP02` |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 48-63 | reserved for `SCP03` |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 64-79 | reserved for `SCP81` (GSMA SGP.02, section 2.2.5.1) |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 112 | Token key (RSA public or DES, also used with `SCP02`) |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 113 | Receipt key (DES) |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 115       | DAP verification key (RSA public or DES)              |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 116 | reserved for CASD |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 117 | 16-byte DES key for Ciphered Load File Data Block |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
| 255 | reserved for ISD with SCP02 without SCP80 support |
|
|
||||||
+-----------+-------------------------------------------------------+
|
|
||||||
|
|
||||||
With that we can now understand that in the first example, the first and the last keyset is intended to be used with
|
|
||||||
`SCP02` and that the second and the third keyset is intended to be used with `SCP80` (OTA SMS). In the second example we
|
|
||||||
can see that the first keyset is intended to be used with `SCP03`, whereas the second should be usable with `SCP81`.
|
|
||||||
|
|
||||||
|
|
||||||
Key Identifier
|
|
||||||
--------------
|
|
||||||
|
|
||||||
Each keyset consists of a number of keys, where each key has a different Key Identifier. The Key Identifier is usually
|
|
||||||
an incrementing number that starts counting at 1. The Key Identifier is used to distinguish the keys within the keyset.
|
|
||||||
The exact number of keys and their attributes depends on the secure channel protocol for which the keyset is intended
|
|
||||||
for. Each secure channel protocol may have its specific requirements on how many keys of which type, length or
|
|
||||||
Key Identifier have to be present.
|
|
||||||
|
|
||||||
However, almost all of the classic secure channel protocols (including `SCP02`, `SCP03` and `SCP81`) make use of the
|
|
||||||
following three-key scheme:
|
|
||||||
|
|
||||||
+----------------+---------+---------------------------------------+
|
|
||||||
| Key Identifier | Keyname | Purpose |
|
|
||||||
+================+=========+=======================================+
|
|
||||||
| 1 | ENC/KIC | encryption/decryption |
|
|
||||||
+----------------+---------+---------------------------------------+
|
|
||||||
| 2 | MAC/KID | cryptographic checksumming/signing |
|
|
||||||
+----------------+---------+---------------------------------------+
|
|
||||||
| 3 | DEK/KIK | encryption/decryption of key material |
|
|
||||||
+----------------+---------+---------------------------------------+
|
|
||||||
|
|
||||||
In this case, all three keys share the same length and are used with the same algorithm. The key length is often used
|
|
||||||
to implicitly select sub-types of an algorithm. (e.g. a 16 byte key of type `aes` is associated with `AES128`, where a 32
|
|
||||||
byte key would be associated with `AES256`).
|
|
||||||
|
|
||||||
The second example shows that different schemes are possible. The `SCP80` keyset from the second example uses a scheme
|
|
||||||
that works with two keys:
|
|
||||||
|
|
||||||
+----------------+---------+---------------------------------------+
|
|
||||||
| Key Identifier | Keyname | Purpose |
|
|
||||||
+================+=========+=======================================+
|
|
||||||
| 1 | TLS-PSK | pre-shared key used for TLS |
|
|
||||||
+----------------+---------+---------------------------------------+
|
|
||||||
| 2 | DEK/KIK | encryption/decryption of key material |
|
|
||||||
+----------------+---------+---------------------------------------+
|
|
||||||
|
|
||||||
It should also be noted that the order in which keysets and keys appear is an implementation detail of the UICC/eUICC
|
|
||||||
O/S. The order has no influence on how a keyset is interpreted. Only the Key Version Number (KVN) and the Key Identifier
|
|
||||||
matter.
|
|
||||||
|
|
||||||
|
|
||||||
Rotating a keyset
|
|
||||||
~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Rotating keys is one of the most basic tasks one might want to perform on an UICC/eUICC before using it productively. In
|
|
||||||
the following example we will illustrate how key rotation can be done. When rotating keys, only the key itself may
|
|
||||||
change. For example it is not possible to change the key length or the algorithm used (see also GlobalPlatform Card
|
|
||||||
Specification, section 11.8.2.3.3). Any key of the current Security Domain can be rotated, this also includes the key
|
|
||||||
that was used to establish the secure channel.
|
|
||||||
|
|
||||||
In the following example we assume that the Security Domain is selected and a secure channel is already established. We
|
|
||||||
intend to rotate the keyset with KVN 112. Since this keyset uses triple DES keys with a key length of 16, we must
|
|
||||||
replace it with a keyset with keys of the same nature.
|
|
||||||
|
|
||||||
The new keyset shall look like this:
|
|
||||||
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| Key Identifier | Keyname | Keyvalue |
|
|
||||||
+================+=========+==================================+
|
|
||||||
| 1 | ENC/KIC | 542C37A6043679F2F9F71116418B1CD5 |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| 2 | MAC/KID | 34F11BAC8E5390B57F4E601372339E3C |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| 3 | DEK/KIK | 5524F4BECFE96FB63FC29D6BAAC6058B |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
|
|
||||||
When passing the keys to the `put_key` commandline, we set the Key Identifier of the first key using the `--key-id`
|
|
||||||
parameter. This Key Identifier will be valid for the first key (KIC) we pass. For all consecutive keys, the Key
|
|
||||||
Identifier will be incremented automatically (see also GlobalPlatform Card Specification, section 11.8.2.2). To ensure
|
|
||||||
that the new KIC, KID and KIK keys get the correct Key Identifiers, it is crucial to maintain order when passing the
|
|
||||||
keys in the `--key-data` arguments. It is also important that each `--key-data` argument is preceded by a `--key-type`
|
|
||||||
argument that sets the algorithm correctly (`des` in this case).
|
|
||||||
|
|
||||||
Finally we have to target the keyset we want to rotate by its KVN. The `--old-key-version-nr` argument is set to 112
|
|
||||||
as this identifies the keyset we want to rotate. The `--key-version-nr` is also set to 112 as we do not want
|
|
||||||
KVN to be changed in this example. Changing the KVN while rotating a keyset is possible. In case the KVN has to change
|
|
||||||
for some reason, the new KVN must be selected carefully to keep the key usable with the associated Secure Channel
|
|
||||||
Protocol.
|
|
||||||
|
|
||||||
The commandline that matches the keyset we had laid out above looks like this:
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> put_key --key-id 1 --key-type des --key-data 542C37A6043679F2F9F71116418B1CD5 --key-type des --key-data 34F11BAC8E5390B57F4E601372339E3C --key-type des --key-data 5524F4BECFE96FB63FC29D6BAAC6058B --old-key-version-nr 112 --key-version-nr 112
|
|
||||||
|
|
||||||
After executing this put_key commandline, the keyset identified by KVN 112 is equipped with new keys. We can use
|
|
||||||
`get_data key_information` to inspect the currently installed keysets. The output should appear unchanged as
|
|
||||||
we only swapped out the keys. All other parameters, identifiers etc. should remain constant.
|
|
||||||
|
|
||||||
.. warning:: It is technically possible to rotate a keyset in a `non atomic` way using one `put_key` commandline for
|
|
||||||
each key. However, in case the targeted keyset is the one used to establish the current secure channel,
|
|
||||||
this method should not be used since, depending on the UICC/eUICC model, half-written key material may
|
|
||||||
interrupt the current secure channel.
|
|
||||||
|
|
||||||
|
|
||||||
Removing a keyset
|
|
||||||
~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
In some cases it is necessary to remove a keyset entirely. This can be done with the `delete_key` command. Here it is
|
|
||||||
important to understand that `delete_key` only removes one specific key from a specific keyset. This means that you
|
|
||||||
need to run a separate `delete_key` command for each key inside a keyset.
|
|
||||||
|
|
||||||
In the following example we assume that the Security Domain is selected and a secure channel is already established. We
|
|
||||||
intend to remove the keyset with KVN 112. This keyset consists of three keys.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> delete_key --key-ver 112 --key-id 1
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> delete_key --key-ver 112 --key-id 2
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> delete_key --key-ver 112 --key-id 3
|
|
||||||
|
|
||||||
To verify that the keyset has been deleted properly, we can use the `get_data key_information` command to inspect the
|
|
||||||
current status of the installed keysets. We should see that the key with KVN 112 is no longer present.
|
|
||||||
|
|
||||||
|
|
||||||
Adding a keyset
|
|
||||||
~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
In the following we will discuss how to add an entirely new keyset. The procedure is almost identical with the key
|
|
||||||
rotation procedure we have already discussed and it is assumed that all details about the key rotation are understood.
|
|
||||||
In this section we will go into more detail and illustrate how to provision new 3DES, `AES128` and `AES256` keysets.
|
|
||||||
|
|
||||||
It is important to keep in mind that storage space on a smartcard is a precious resource. In many cases the amount of
|
|
||||||
keysets that a Security Domain can store is limited. In some situations you may be forced to sacrifice one of your
|
|
||||||
existing keysets in favor of a new keyset.
|
|
||||||
|
|
||||||
The main difference between key rotation and the adding of new keys is that we do not simply replace an existing key.
|
|
||||||
Instead an entirely new key is programmed into the Security Domain. Therefore the `put_key` commandline will have no
|
|
||||||
`--old-key-version-nr` parameter. From the commandline perspective, this is already the only visible difference from a
|
|
||||||
commandline that simply rotates a keyset. Since we are writing an entirely new keyset, we are free to choose the
|
|
||||||
algorithm and the key length within the parameter range permitted by the targeted secure channel protocol. Otherwise
|
|
||||||
the same rules apply.
|
|
||||||
|
|
||||||
For reference, it should be mentioned that it is also possible to add or rotate keyset using multiple `put_key`
|
|
||||||
commandlines. In this case one `put_key` commandline for each key is used. Each commandline will specify `--key-id` and
|
|
||||||
`--key-version-nr` and one `--key-type` and `--key-data` tuple. However, when rotating or adding a keyset step-by-step,
|
|
||||||
the whole process happens in a `non-atomic` way, which is less reliable. Therefore we will favor the `atomic method`.
|
|
||||||
|
|
||||||
In the following examples we assume that the Security Domain is selected and a secure channel is already established.
|
|
||||||
|
|
||||||
|
|
||||||
Example: `3DES` key for `SCP02`
|
|
||||||
-------------------------------
|
|
||||||
|
|
||||||
Let's assume we want to provision a new 3DES keyset that we can use for SCP02. The keyset shall look like this:
|
|
||||||
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| Key Identifier | Keyname | Keyvalue |
|
|
||||||
+================+=========+==================================+
|
|
||||||
| 1 | ENC/KIC | 542C37A6043679F2F9F71116418B1CD5 |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| 2 | MAC/KID | 34F11BAC8E5390B57F4E601372339E3C |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| 3 | DEK/KIK | 5524F4BECFE96FB63FC29D6BAAC6058B |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
|
|
||||||
The keyset shall be associated with the KVN 46. We have made sure before that KVN 46 is still unused and that this
|
|
||||||
KVN number is actually suitable for SCP02 keys. As we are using 3DES, it is obvious that we have to pass 3 keys with 16
|
|
||||||
byte length.
|
|
||||||
|
|
||||||
To program the key, we may use the following commandline. As we can see, this commandline is almost the exact same as
|
|
||||||
the one from the key rotation example where we were rotating a 3DES key. The only difference is that we didn't specify
|
|
||||||
an old KVN number and that we have chosen a different KVN.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> put_key --key-id 1 --key-type des --key-data 542C37A6043679F2F9F71116418B1CD5 --key-type des --key-data 34F11BAC8E5390B57F4E601372339E3C --key-type des --key-data 5524F4BECFE96FB63FC29D6BAAC6058B --key-version-nr 46
|
|
||||||
|
|
||||||
In case of success, the keyset should appear in the `key_information` among the other keysets that are already present.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> get_data key_information
|
|
||||||
{
|
|
||||||
"key_information": [
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 46,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 46,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 3,
|
|
||||||
"key_version_number": 46,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "des",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
...
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Example: `AES128` key for `SCP80`
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
In this example we intend to provision a new `AES128` keyset that we can use with SCP80 (OTA SMS). The keyset shall look
|
|
||||||
like this:
|
|
||||||
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| Key Identifier | Keyname | Keyvalue |
|
|
||||||
+================+=========+==================================+
|
|
||||||
| 1 | ENC/KIC | 542C37A6043679F2F9F71116418B1CD5 |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| 2 | MAC/KID | 34F11BAC8E5390B57F4E601372339E3C |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| 3 | DEK/KIK | 5524F4BECFE96FB63FC29D6BAAC6058B |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
|
|
||||||
In addition to that, we want to associate this key with KVN 3. We have inspected the currently installed keysets before
|
|
||||||
and made sure that KVN 3 is still unused. We are also aware that for SCP80 we may only use KVN values from 1 to 15.
|
|
||||||
|
|
||||||
For `AES128`, we specify the algorithm using the `--key-type aes` parameter. The selection between `AES128` and `AES256` is
|
|
||||||
done implicitly using the key length. Since we want to use `AES128` in this case, all three keys have a length of 16 byte.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> put_key --key-id 1 --key-type aes --key-data 542C37A6043679F2F9F71116418B1CD5 --key-type aes --key-data 34F11BAC8E5390B57F4E601372339E3C --key-type aes --key-data 5524F4BECFE96FB63FC29D6BAAC6058B --key-version-nr 3
|
|
||||||
|
|
||||||
In case of success, the keyset should appear in the `key_information` among the other keysets that are already present.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> get_data key_information
|
|
||||||
{
|
|
||||||
"key_information": [
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 3,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 3,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 3,
|
|
||||||
"key_version_number": 3,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
...
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Example: `AES256` key for `SCP03`
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
Let's assume we want to provision a new `AES256` keyset that we can use for SCP03. The keyset shall look like this:
|
|
||||||
|
|
||||||
+----------------+---------+------------------------------------------------------------------+
|
|
||||||
| Key Identifier | Keyname | Keyvalue |
|
|
||||||
+================+=========+==================================================================+
|
|
||||||
| 1 | ENC/KIC | 542C37A6043679F2F9F71116418B1CD5542C37A6043679F2F9F71116418B1CD5 |
|
|
||||||
+----------------+---------+------------------------------------------------------------------+
|
|
||||||
| 2 | MAC/KID | 34F11BAC8E5390B57F4E601372339E3C34F11BAC8E5390B57F4E601372339E3C |
|
|
||||||
+----------------+---------+------------------------------------------------------------------+
|
|
||||||
| 3 | DEK/KIK | 5524F4BECFE96FB63FC29D6BAAC6058B5524F4BECFE96FB63FC29D6BAAC6058B |
|
|
||||||
+----------------+---------+------------------------------------------------------------------+
|
|
||||||
|
|
||||||
In addition to that, we assume that we want to associate this key with KVN 51. This KVN number falls in the range of
|
|
||||||
48 - 63 and is therefore suitable for a key that shall be usable with SCP03. We also made sure before that KVN 51 is
|
|
||||||
still unused.
|
|
||||||
|
|
||||||
With that we can go ahead and make up the following commandline:
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> put_key --key-id 1 --key-type aes --key-data 542C37A6043679F2F9F71116418B1CD5542C37A6043679F2F9F71116418B1CD5 --key-type aes --key-data 34F11BAC8E5390B57F4E601372339E3C34F11BAC8E5390B57F4E601372339E3C --key-type aes --key-data 5524F4BECFE96FB63FC29D6BAAC6058B5524F4BECFE96FB63FC29D6BAAC6058B --key-version-nr 51
|
|
||||||
|
|
||||||
In case of success, we should see the keyset in the `key_information`
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> get_data key_information
|
|
||||||
{
|
|
||||||
"key_information": [
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 51,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 32
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 51,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 32
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 3,
|
|
||||||
"key_version_number": 51,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 32
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
...
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Example: `AES128` key for `SCP81`
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
In this example we will show how to provision a new `AES128` keyset for `SCP81`. We will provision this keyset under
|
|
||||||
KVN 64. The keyset we intend to apply shall look like this:
|
|
||||||
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| Key Identifier | Keyname | Keyvalue |
|
|
||||||
+================+=========+==================================+
|
|
||||||
| 1 | TLS-PSK | 000102030405060708090a0b0c0d0e0f |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
| 2 | DEK/KIK | 000102030405060708090a0b0c0d0e0f |
|
|
||||||
+----------------+---------+----------------------------------+
|
|
||||||
|
|
||||||
With that we can put together the following command line:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
put_key --key-id 1 --key-type tls_psk --key-data 000102030405060708090a0b0c0d0e0f --key-type aes --key-data 000102030405060708090a0b0c0d0e0f --key-version-nr 64
|
|
||||||
|
|
||||||
In case of success, the keyset should appear in the `key_information` as follows:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pySIM-shell (SCP03[03]:00:MF/ADF.ISD-R)> get_data key_information
|
|
||||||
{
|
|
||||||
"key_information": [
|
|
||||||
...,
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 2,
|
|
||||||
"key_version_number": 64,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "aes",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key_information_data": {
|
|
||||||
"key_identifier": 1,
|
|
||||||
"key_version_number": 64,
|
|
||||||
"key_types": [
|
|
||||||
{
|
|
||||||
"type": "tls_psk",
|
|
||||||
"length": 16
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -1,267 +0,0 @@
|
|||||||
"""
|
|
||||||
Sphinx extension: auto-generate docs/filesystem.rst from the pySim EF class hierarchy.
|
|
||||||
|
|
||||||
Hooked into Sphinx's ``builder-inited`` event so the file is always regenerated
|
|
||||||
from the live Python classes before Sphinx reads any source files.
|
|
||||||
|
|
||||||
The table of root objects to document is in SECTIONS near the top of this file.
|
|
||||||
EXCLUDED lists CardProfile/CardApplication subclasses intentionally omitted from
|
|
||||||
SECTIONS, with reasons. Both tables are read by tests/unittests/test_fs_coverage.py
|
|
||||||
to ensure every class with EF/DF content is accounted for.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import importlib
|
|
||||||
import inspect
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import textwrap
|
|
||||||
|
|
||||||
# Ensure pySim is importable when this module is loaded as a Sphinx extension
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
|
|
||||||
|
|
||||||
from pySim.filesystem import (CardApplication, CardDF, CardMF, CardEF, # noqa: E402
|
|
||||||
TransparentEF, TransRecEF, LinFixedEF, CyclicEF, BerTlvEF)
|
|
||||||
from pySim.profile import CardProfile # noqa: E402
|
|
||||||
|
|
||||||
|
|
||||||
# Generic EF base classes whose docstrings describe the *type* of file
# (Transparent, LinFixed, ...) rather than a specific file's content.
# Suppress those boilerplate texts in the per-EF entries; they are only
# useful once, at the top of the document or in a dedicated glossary.
# Membership is tested with `cls in _EF_BASE_TYPES`, i.e. it matches EFs that
# are *direct* instances of a base type, not subclasses of one.
_EF_BASE_TYPES: frozenset = frozenset([TransparentEF,
                                       TransRecEF,
                                       LinFixedEF,
                                       CyclicEF,
                                       BerTlvEF])
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
# Sections: (heading, module, class-name)
# The class must be either a CardProfile (uses .files_in_mf) or a CardDF
# subclass (uses .children).
# The order below is also the order of the generated document sections;
# classes documented in an earlier section are not repeated in later ones.
# ---------------------------------------------------------------------------
SECTIONS = [
    ('MF / TS 102 221 (UICC)',
     'pySim.ts_102_221', 'CardProfileUICC'),
    ('ADF.USIM / TS 31.102',
     'pySim.ts_31_102', 'ADF_USIM'),
    ('ADF.ISIM / TS 31.103',
     'pySim.ts_31_103', 'ADF_ISIM'),
    ('ADF.HPSIM / TS 31.104',
     'pySim.ts_31_104', 'ADF_HPSIM'),
    ('DF.GSM + DF.TELECOM / TS 51.011 (SIM)',
     'pySim.ts_51_011', 'CardProfileSIM'),
    ('CDMA / IS-820 (RUIM)',
     'pySim.cdma_ruim', 'CardProfileRUIM'),
    ('DF.EIRENE / GSM-R',
     'pySim.gsm_r', 'DF_EIRENE'),
    ('DF.SYSTEM / sysmocom SJA2+SJA5',
     'pySim.sysmocom_sja2', 'DF_SYSTEM'),
]
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
# Excluded: {(module, class-name)}
# CardProfile and CardApplication subclasses that have EF/DF children but are
# intentionally absent from SECTIONS. Keeping this list explicit lets
# test_fs_coverage.py detect newly added classes that the developer forgot to
# add to either table.
# Each entry is an (importable module path, class name) tuple, addressed the
# same way as the last two fields of a SECTIONS entry.
# ---------------------------------------------------------------------------
EXCLUDED = {
    # eUICC profiles inherit files_in_mf verbatim from CardProfileUICC; the
    # eUICC-specific content lives in ISD-R / ISD-P applications, not in MF.
    ('pySim.euicc', 'CardProfileEuiccSGP02'),
    ('pySim.euicc', 'CardProfileEuiccSGP22'),
    ('pySim.euicc', 'CardProfileEuiccSGP32'),
    # CardApplication* classes are thin wrappers that embed an ADF_* instance.
    # The ADF contents are already documented via the corresponding ADF_* entry
    # in SECTIONS above.
    ('pySim.ts_31_102', 'CardApplicationUSIM'),
    ('pySim.ts_31_102', 'CardApplicationUSIMnonIMSI'),
    ('pySim.ts_31_103', 'CardApplicationISIM'),
    ('pySim.ts_31_104', 'CardApplicationHPSIM'),
}
|
|
||||||
|
|
||||||
# RST underline characters ordered by nesting depth
# (the list index is the heading level passed to _heading()).
_HEADING_CHARS: list = ['=', '=', '-', '~', '^', '"']
# Level 0 uses '=' with overline (page title).
# Level 1 uses '=' without overline (major sections).
# Levels 2+ use the remaining characters for DFs.
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# RST formatting helpers
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def _heading(title: str, level: int) -> str:
    """Format *title* as an RST heading for nesting depth *level*.

    Level 0 (the page title) is rendered with both an overline and an
    underline; all other levels get an underline only.
    """
    underline = _HEADING_CHARS[level] * len(title)
    if level == 0:
        return '\n'.join((underline, title, underline)) + '\n\n'
    return '\n'.join((title, underline)) + '\n\n'
|
|
||||||
|
|
||||||
|
|
||||||
def _json_default(obj):
|
|
||||||
"""Fallback serialiser: bytes -> hex, anything else -> repr."""
|
|
||||||
if isinstance(obj, (bytes, bytearray)):
|
|
||||||
return obj.hex()
|
|
||||||
return repr(obj)
|
|
||||||
|
|
||||||
|
|
||||||
def _examples_block(cls) -> str:
    """Return RST code-block examples (one per test vector), or '' if none.

    Each vector is rendered as a ``json5`` code-block whose first line is a
    ``// comment`` carrying the hex-encoded binary, followed by the decoded
    JSON.  ``json5`` (rather than ``json``) keeps Pygments from flagging the
    ``//`` comment as a syntax error.
    """
    # Gather all available test vectors from both class attributes.
    vectors = []
    for attr_name in ('_test_de_encode', '_test_decode'):
        attr_vectors = getattr(cls, attr_name, None)
        if attr_vectors:
            vectors.extend(attr_vectors)
    if not vectors:
        return ''

    out = ['**Examples**\n\n']

    for vec in vectors:
        # 2-tuple: (encoded, decoded)
        # 3-tuple: (encoded, record_nr, decoded) — LinFixedEF / CyclicEF
        if len(vec) >= 3:
            comment = f'record {vec[1]}: {vec[0].lower()}'
            decoded = vec[2]
        else:
            comment = f'file: {vec[0].lower()}'
            decoded = vec[1]

        rendered = textwrap.indent(
            json.dumps(decoded, default=_json_default, indent=2), ' ')

        out.append('.. code-block:: json5\n\n')
        out.append(f' // {comment}\n')
        out.append(rendered + '\n')
        out.append('\n')

    return ''.join(out)
|
|
||||||
|
|
||||||
|
|
||||||
def _document_ef(ef: CardEF) -> str:
    """Return RST for a single EF. Uses ``rubric`` to stay out of the TOC."""
    ef_cls = type(ef)

    # Build the rubric title: path, optional FID, optional description.
    pieces = [ef.fully_qualified_path_str()]
    if ef.fid:
        pieces.append(f'({ef.fid.upper()})')
    if ef.desc:
        pieces.append(f'\u2014 {ef.desc}')  # em-dash

    out = [f'.. rubric:: {" ".join(pieces)}\n\n']

    # Only show a docstring if it is specific to this class. EFs that are
    # direct instances of a base type (TransparentEF, LinFixedEF, ...) carry
    # only the generic "what is a TransparentEF" boilerplate; named subclasses
    # without their own __doc__ have cls.__dict__['__doc__'] == None. Either
    # way, suppress the text here - it belongs at the document level, not
    # repeated for every single EF entry.
    docstring = None if ef_cls in _EF_BASE_TYPES else ef_cls.__dict__.get('__doc__')
    if docstring:
        out.append(inspect.cleandoc(docstring) + '\n\n')

    example_text = _examples_block(ef_cls)
    if example_text:
        out.append(example_text)

    return ''.join(out)
|
|
||||||
|
|
||||||
|
|
||||||
def _document_df(df: CardDF, level: int) -> str:
    """Return RST for a DF section and all its children, recursively."""
    # Section title: path, optional FID, optional description.
    pieces = [df.fully_qualified_path_str()]
    if df.fid:
        pieces.append(f'({df.fid.upper()})')
    if df.desc:
        pieces.append(f'\u2014 {df.desc}')  # em-dash

    out = [_heading(' '.join(pieces), level)]

    # Plain CardDF/CardMF docstrings are generic boilerplate; only emit
    # documentation written for a specific DF subclass.
    df_cls = type(df)
    if df_cls not in (CardDF, CardMF):
        docstring = df_cls.__dict__.get('__doc__')
        if docstring:
            out.append(inspect.cleandoc(docstring) + '\n\n')

    # Recurse into sub-DFs; EFs are emitted as rubric entries.
    for child in df.children.values():
        if isinstance(child, CardDF):
            out.append(_document_df(child, level + 1))
        elif isinstance(child, CardEF):
            out.append(_document_ef(child))

    return ''.join(out)
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# Top-level generator
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def generate_filesystem_rst() -> str:
|
|
||||||
"""Walk all registered sections and return the full RST document as a string."""
|
|
||||||
out = [
|
|
||||||
'.. This file is auto-generated by docs/pysim_fs_sphinx.py — do not edit.\n\n',
|
|
||||||
_heading('Card Filesystem Reference', 0),
|
|
||||||
'This page documents all Elementary Files (EFs) and Dedicated Files (DFs) '
|
|
||||||
'implemented in pySim, organised by their location in the card filesystem.\n\n',
|
|
||||||
]
|
|
||||||
|
|
||||||
# Track already-documented classes so that DFs/EFs shared between profiles
|
|
||||||
# (e.g. DF.TELECOM / DF.GSM present in both CardProfileSIM and CardProfileRUIM)
|
|
||||||
# are only emitted once.
|
|
||||||
seen_types: set = set()
|
|
||||||
|
|
||||||
for section_title, module_path, class_name in SECTIONS:
|
|
||||||
module = importlib.import_module(module_path)
|
|
||||||
cls = getattr(module, class_name)
|
|
||||||
obj = cls()
|
|
||||||
|
|
||||||
if isinstance(obj, CardProfile):
|
|
||||||
files = obj.files_in_mf
|
|
||||||
elif isinstance(obj, CardApplication):
|
|
||||||
files = list(obj.adf.children.values())
|
|
||||||
elif isinstance(obj, CardDF):
|
|
||||||
files = list(obj.children.values())
|
|
||||||
else:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Filter out files whose class was already documented in an earlier section.
|
|
||||||
files = [f for f in files if type(f) not in seen_types]
|
|
||||||
if not files:
|
|
||||||
continue
|
|
||||||
|
|
||||||
out.append(_heading(section_title, 1))
|
|
||||||
|
|
||||||
for f in files:
|
|
||||||
seen_types.add(type(f))
|
|
||||||
if isinstance(f, CardDF):
|
|
||||||
out.append(_document_df(f, level=2))
|
|
||||||
elif isinstance(f, CardEF):
|
|
||||||
out.append(_document_ef(f))
|
|
||||||
|
|
||||||
return ''.join(out)
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# Sphinx integration
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def _on_builder_inited(app):
|
|
||||||
output_path = os.path.join(app.srcdir, 'filesystem.rst')
|
|
||||||
with open(output_path, 'w') as fh:
|
|
||||||
fh.write(generate_filesystem_rst())
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
app.connect('builder-inited', _on_builder_inited)
|
|
||||||
return {'version': '0.1', 'parallel_read_safe': True}
|
|
||||||
@@ -67,7 +67,7 @@ Inspecting applications
|
|||||||
|
|
||||||
To inspect the application PE contents of an existing profile package, sub-command `info` with parameter '--apps' can
|
To inspect the application PE contents of an existing profile package, sub-command `info` with parameter '--apps' can
|
||||||
be used. This command lists out all application and their parameters in detail. This allows an application developer
|
be used. This command lists out all application and their parameters in detail. This allows an application developer
|
||||||
to check if the applet insertion was carried out as expected.
|
to check if the applet insertaion was carried out as expected.
|
||||||
|
|
||||||
Example: Listing applications and their parameters
|
Example: Listing applications and their parameters
|
||||||
::
|
::
|
||||||
|
|||||||
@@ -68,7 +68,7 @@ Usage Examples
|
|||||||
|
|
||||||
suci-tutorial
|
suci-tutorial
|
||||||
cap-tutorial
|
cap-tutorial
|
||||||
put_key-tutorial
|
|
||||||
|
|
||||||
Advanced Topics
|
Advanced Topics
|
||||||
---------------
|
---------------
|
||||||
@@ -602,8 +602,8 @@ This allows for easy interactive modification of records.
|
|||||||
If this command fails before the editor is spawned, it means that the current record contents is not decodable,
|
If this command fails before the editor is spawned, it means that the current record contents is not decodable,
|
||||||
and you should use the :ref:`update_record_decoded` or :ref:`update_record` command.
|
and you should use the :ref:`update_record_decoded` or :ref:`update_record` command.
|
||||||
|
|
||||||
If this command fails after making your modifications in the editor, it means that the new file contents is not
|
If this command fails after making your modificatiosn in the editor, it means that the new file contents is not
|
||||||
encodable; please check your input and/or use the raw :ref:`update_record` command.
|
encodable; please check your input and/or us the raw :ref:`update_record` comamdn.
|
||||||
|
|
||||||
|
|
||||||
decode_hex
|
decode_hex
|
||||||
@@ -708,8 +708,8 @@ This allows for easy interactive modification of file contents.
|
|||||||
If this command fails before the editor is spawned, it means that the current file contents is not decodable,
|
If this command fails before the editor is spawned, it means that the current file contents is not decodable,
|
||||||
and you should use the :ref:`update_binary_decoded` or :ref:`update_binary` command.
|
and you should use the :ref:`update_binary_decoded` or :ref:`update_binary` command.
|
||||||
|
|
||||||
If this command fails after making your modifications in the editor, it means that the new file contents is not
|
If this command fails after making your modificatiosn in the editor, it means that the new file contents is not
|
||||||
encodable; please check your input and/or use the raw :ref:`update_binary` command.
|
encodable; please check your input and/or us the raw :ref:`update_binary` comamdn.
|
||||||
|
|
||||||
|
|
||||||
decode_hex
|
decode_hex
|
||||||
|
|||||||
@@ -1,179 +0,0 @@
|
|||||||
smpp-ota-tool
|
|
||||||
=============
|
|
||||||
|
|
||||||
The `smpp-ota-tool` allows users to send OTA SMS messages containing APDU scripts (RFM, RAM) via an SMPP server. The
|
|
||||||
intended audience are developers who want to test/evaluate the OTA SMS interface of a SIM/UICC/eUICC. `smpp-ota-tool`
|
|
||||||
is intended to be used as a companion tool for :ref:`pySim-smpp2sim`, however it should be usable on any other SMPP
|
|
||||||
server (such as a production SMSC of a live cellular network) as well.
|
|
||||||
|
|
||||||
From the technical perspective `smpp-ota-tool` takes the role of an SMPP ESME. It takes care of the encoding, encryption
|
|
||||||
and checksumming (signing) of the RFM/RAM OTA SMS and eventually submits it to the SMPP server. The program then waits
|
|
||||||
for a response. The response is automatically parsed and printed on stdout. This makes the program also suitable to be
|
|
||||||
called from shell scripts.
|
|
||||||
|
|
||||||
.. note:: In the following we will we will refer to `SIM` as one of the following: `SIM`, `USIM`, `ISIM`, `UICC`,
|
|
||||||
`eUICC`, `eSIM`.
|
|
||||||
|
|
||||||
Applying OTA keys
|
|
||||||
~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Depending on the `SIM` type you will receive one or more sets of keys which you can use to communicate with the `SIM`
|
|
||||||
through a secure channel protocol. When using the OTA SMS method, the SCP80 protocol is used and it therefore crucial
|
|
||||||
to use a keyset that is actually suitable for SCP80.
|
|
||||||
|
|
||||||
A keyset usually consists of three keys:
|
|
||||||
|
|
||||||
#. KIC: the key used for ciphering (encryption/decryption)
|
|
||||||
#. KID: the key used to compute a cryptographic checksum (signing)
|
|
||||||
#. KIK: the key used to encrypt/decrypt key material (key rotation, adding of new keys)
|
|
||||||
|
|
||||||
From the transport security perspective, only KIC and KID are relevant. The KIK (also referenced as "Data Encryption
|
|
||||||
Key", DEK) is only used when keys are rotated or new keys are added (see also ETSI TS 102 226, section 8.2.1.5).
|
|
||||||
|
|
||||||
When the keyset is programmed into the security domain of the `SIM`, it is tied to a specific cryptographic algorithm
|
|
||||||
(3DES, AES128 or AES256) and a so called Key Version Number (KVN). The term "Key Version Number" is misleading, since
|
|
||||||
it is actually not a version number. It is a unique identifier of a certain keyset which also identifies for which
|
|
||||||
secure channel protocol the keyset may be used. Keysets with a KVN from 1-15 (``0x01``-``0x0F``) are suitable for SCP80.
|
|
||||||
This means that it is not only important to know just the KIC/KID/KIK keys. Also the related algorithms and the KVN
|
|
||||||
numbers must be known.
|
|
||||||
|
|
||||||
.. note:: SCP80 keysets typically start counting from 1 upwards. Typical configurations use a set of 3 keysets with
|
|
||||||
KVN numbers 1-3.
|
|
||||||
|
|
||||||
Addressing an Application
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
When communicating with a specific application on a `SIM` via SCP80, it is important to address that application with
|
|
||||||
the correct parameters. The following two parameters must be known in advance:
|
|
||||||
|
|
||||||
#. TAR: The Toolkit Application Reference (TAR) number is a three byte value that uniquely addresses an application
|
|
||||||
on the `SIM`. The exact values may vary (see also ETSI TS 101 220, Table D.1).
|
|
||||||
#. MSL: The Minimum Security Level (MSL) is a bit-field that dictates which of the security measures encoded in the
|
|
||||||
SPI are mandatory (see also ETSI TS 102 225, section 5.1.1).
|
|
||||||
|
|
||||||
A practical example
|
|
||||||
~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
.. note:: This tutorial assumes that pySim-smpp2sim is running on the local machine with its default parameters.
|
|
||||||
See also :ref:`pySim-smpp2sim`.
|
|
||||||
|
|
||||||
Let's assume that an OTA SMS shall be sent to the SIM RFM application of an sysmoISIM-SJA2. What we want to do is to
|
|
||||||
select DF.GSM and to get the select response back.
|
|
||||||
|
|
||||||
We have received the following key material from the `SIM` vendor:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
KIC1: F09C43EE1A0391665CC9F05AF4E0BD10
|
|
||||||
KID1: 01981F4A20999F62AF99988007BAF6CA
|
|
||||||
KIK1: 8F8AEE5CDCC5D361368BC45673D99195
|
|
||||||
KIC2: 01022916E945B656FDE03F806A105FA2
|
|
||||||
KID2: D326CB69F160333CC5BD1495D448EFD6
|
|
||||||
KIK2: 08037E0590DFE049D4975FFB8652F625
|
|
||||||
KIC3: 2B22824D0D27A3A1CEEC512B312082B4
|
|
||||||
KID3: F1697766925A11F4458295590137B672
|
|
||||||
KIK3: C7EE69B2C5A1C8E160DD36A38EB517B3
|
|
||||||
|
|
||||||
Those are three keysets. The enumeration is directly equal to the KVN used. All three keysets are 3DES keys, which
|
|
||||||
means triple_des_cbc2 is the correct algorithm to use.
|
|
||||||
|
|
||||||
.. note:: The key set configuration can be confirmed by retrieving the key configuration using
|
|
||||||
`get_data key_information` from within an SCP02 session on ADF.ISD.
|
|
||||||
|
|
||||||
In this example we intend to address the SIM RFM application on the `SIM`. Which according to the manual has TAR ``B00010``
|
|
||||||
and MSL ``0x06``. When we hold ``0x06`` = ``0b00000110`` against the SPI coding chart (see also ETSI TS 102 225,
|
|
||||||
section 5.1.1). We can deduct that Ciphering and Cryptographic Checksum are mandatory.
|
|
||||||
|
|
||||||
.. note:: The MSL (see also ETSI TS 102 226, section 6.1) is assigned to an application by the `SIM` issuer. It is a
|
|
||||||
custom decision and may vary with different `SIM` types/profiles. In the case of sysmoISIM-SJS1/SJA2/SJA5 the
|
|
||||||
counter requirement has been waived to simplify lab/research type use. In productive environments, `SIM`
|
|
||||||
applications should ideally use an MSL that makes the counter mandatory.
|
|
||||||
|
|
||||||
In order to select DF.GSM (``0x7F20``) and to retrieve the select response, two APDUs are needed. The first APDU is the
|
|
||||||
select command ``A0A40000027F20`` and the second is the related get-response command ``A0C0000016``. Those APDUs will be
|
|
||||||
concatenated and are sent in a single message. The message containing the concatenated APDUs works as a script that
|
|
||||||
is received by the SIM RFM application and then executed. This method poses some limitations that have to be taken into
|
|
||||||
account when making requests like this (see also ETSI TS 102 226, section 5).
|
|
||||||
|
|
||||||
With this information we may now construct a commandline for `smpp-ota-tool.py`. We will pass the KVN as kid_idx and
|
|
||||||
kic_idx (see also ETSI TS 102 225, Table 2, fields `KIc` and `KID`). Both index values should refer to the same
|
|
||||||
keyset/KVN as keysets should not be mixed. (`smpp-ota-tool` still provides separate parameters anyway to allow testing
|
|
||||||
with invalid keyset combinations)
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ PYTHONPATH=./ ./contrib/smpp-ota-tool.py --kic F09C43EE1A0391665CC9F05AF4E0BD10 --kid 01981F4A20999F62AF99988107BAF6CA --kid_idx 1 --kic_idx 1 --algo-crypt triple_des_cbc2 --algo-auth triple_des_cbc2 --tar B00010 --apdu A0A40000027F20 --apdu A0C0000016
|
|
||||||
2026-02-26 17:13:56 INFO Connecting to localhost:2775...
|
|
||||||
2026-02-26 17:13:56 INFO C-APDU sending: a0a40000027f20a0c0000016...
|
|
||||||
2026-02-26 17:13:56 INFO SMS-TPDU sending: 02700000281506191515b00010da1d6cbbd0d11ce4330d844c7408340943e843f67a6d7b0674730881605fd62d...
|
|
||||||
2026-02-26 17:13:56 INFO SMS-TPDU sent, waiting for response...
|
|
||||||
2026-02-26 17:13:56 INFO SMS-TPDU received: 027100002c12b000107ddf58d1780f771638b3975759f4296cf5c31efc87a16a1b61921426baa16da1b5ba1a9951d59a39
|
|
||||||
2026-02-26 17:13:56 INFO SMS-TPDU decoded: (Container(rpl=44, rhl=18, tar=b'\xb0\x00\x10', cntr=b'\x00\x00\x00\x00\x00', pcntr=0, response_status=uEnumIntegerString.new(0, 'por_ok'), cc_rc=b'\x8f\xea\xf5.\xf4\x0e\xc2\x14', secured_data=b'\x02\x90\x00\x00\x00\xff\xff\x7f \x02\x00\x00\x00\x00\x00\t\xb1\x065\x04\x00\x83\x8a\x83\x8a'), Container(number_of_commands=2, last_status_word=u'9000', last_response_data=u'0000ffff7f2002000000000009b106350400838a838a'))
|
|
||||||
2026-02-26 17:13:56 INFO R-APDU received: 0000ffff7f2002000000000009b106350400838a838a 9000
|
|
||||||
0000ffff7f2002000000000009b106350400838a838a 9000
|
|
||||||
2026-02-26 17:13:56 INFO Disconnecting...
|
|
||||||
|
|
||||||
The result we see is the select response of DF.GSM and a status word indicating that the last command has been
|
|
||||||
processed normally.
|
|
||||||
|
|
||||||
As we can see, this mechanism now allows us to perform small administrative tasks remotely. We can read the contents of
|
|
||||||
files remotely or make changes to files. Depending on the changes we make, there may be security issues arising from
|
|
||||||
replay attacks. With the commandline above, the communication is encrypted and protected by a cryptographic checksum,
|
|
||||||
so an adversary can neither read, nor alter the message. However, an adversary could still replay an intercepted
|
|
||||||
message and the `SIM` would happily execute the contained APDUs again.
|
|
||||||
|
|
||||||
To prevent this, we may include a replay protection counter within the message. In this case, the MSL indicates that a
|
|
||||||
replay protection counter is not required. However, to extended the security of our messages, we may chose to use a
|
|
||||||
counter anyway. In the following example, we will encode a counter value of 100. We will instruct the `SIM` to make sure
|
|
||||||
that the value we send is higher than the counter value that is currently stored in the `SIM`.
|
|
||||||
|
|
||||||
To add a replay connection counter we add the commandline arguments `--cntr-req` to set the counter requirement and
|
|
||||||
`--cntr` to pass the counter value.
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ PYTHONPATH=./ ./contrib/smpp-ota-tool.py --kic F09C43EE1A0391665CC9F05AF4E0BD10 --kid 01981F4A20999F62AF99988107BAF6CA --kid_idx 1 --kic_idx 1 --algo-crypt triple_des_cbc2 --algo-auth triple_des_cbc2 --tar B00010 --apdu A0A40000027F20 --apdu A0C0000016 --cntr-req counter_must_be_higher --cntr 100
|
|
||||||
2026-02-26 17:16:39 INFO Connecting to localhost:2775...
|
|
||||||
2026-02-26 17:16:39 INFO C-APDU sending: a0a40000027f20a0c0000016...
|
|
||||||
2026-02-26 17:16:39 INFO SMS-TPDU sending: 02700000281516191515b000103a4f599e94f2b5dcfbbda984761b7977df6514c57a580fb4844787c436d2eade...
|
|
||||||
2026-02-26 17:16:39 INFO SMS-TPDU sent, waiting for response...
|
|
||||||
2026-02-26 17:16:39 INFO SMS-TPDU received: 027100002c12b0001049fb0315f6c6401b553867f412cefaf9355b38271178edb342a3bc9cc7e670cdc1f45eea6ffcbb39
|
|
||||||
2026-02-26 17:16:39 INFO SMS-TPDU decoded: (Container(rpl=44, rhl=18, tar=b'\xb0\x00\x10', cntr=b'\x00\x00\x00\x00d', pcntr=0, response_status=uEnumIntegerString.new(0, 'por_ok'), cc_rc=b'\xa9/\xc7\xc9\x00"\xab5', secured_data=b'\x02\x90\x00\x00\x00\xff\xff\x7f \x02\x00\x00\x00\x00\x00\t\xb1\x065\x04\x00\x83\x8a\x83\x8a'), Container(number_of_commands=2, last_status_word=u'9000', last_response_data=u'0000ffff7f2002000000000009b106350400838a838a'))
|
|
||||||
2026-02-26 17:16:39 INFO R-APDU received: 0000ffff7f2002000000000009b106350400838a838a 9000
|
|
||||||
0000ffff7f2002000000000009b106350400838a838a 9000
|
|
||||||
2026-02-26 17:16:39 INFO Disconnecting...
|
|
||||||
|
|
||||||
The `SIM` has accepted the message. The message got processed and the `SIM` has set its internal to 100. As an experiment,
|
|
||||||
we may try to re-use the counter value:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ PYTHONPATH=./ ./contrib/smpp-ota-tool.py --kic F09C43EE1A0391665CC9F05AF4E0BD10 --kid 01981F4A20999F62AF99988107BAF6CA --kid_idx 1 --kic_idx 1 --algo-crypt triple_des_cbc2 --algo-auth triple_des_cbc2 --tar B00010 --apdu A0A40000027F20 --apdu A0C0000016 --cntr-req counter_must_be_higher --cntr 100
|
|
||||||
2026-02-26 17:16:43 INFO Connecting to localhost:2775...
|
|
||||||
2026-02-26 17:16:43 INFO C-APDU sending: a0a40000027f20a0c0000016...
|
|
||||||
2026-02-26 17:16:43 INFO SMS-TPDU sending: 02700000281516191515b000103a4f599e94f2b5dcfbbda984761b7977df6514c57a580fb4844787c436d2eade...
|
|
||||||
2026-02-26 17:16:43 INFO SMS-TPDU sent, waiting for response...
|
|
||||||
2026-02-26 17:16:43 INFO SMS-TPDU received: 027100000b0ab0001000000000000006
|
|
||||||
2026-02-26 17:16:43 INFO SMS-TPDU decoded: (Container(rpl=11, rhl=10, tar=b'\xb0\x00\x10', cntr=b'\x00\x00\x00\x00\x00', pcntr=0, response_status=uEnumIntegerString.new(6, 'undefined_security_error'), cc_rc=b'', secured_data=b''), None)
|
|
||||||
Traceback (most recent call last):
|
|
||||||
File "/home/user/work/git_master/pysim/./contrib/smpp-ota-tool.py", line 238, in <module>
|
|
||||||
resp, sw = smpp_handler.transceive_apdu(apdu, opts.src_addr, opts.dest_addr, opts.timeout)
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
File "/home/user/work/git_master/pysim/./contrib/smpp-ota-tool.py", line 162, in transceive_apdu
|
|
||||||
raise ValueError("Response does not contain any last_response_data, no R-APDU received!")
|
|
||||||
ValueError: Response does not contain any last_response_data, no R-APDU received!
|
|
||||||
2026-02-26 17:16:43 INFO Disconnecting...
|
|
||||||
|
|
||||||
As we can see, the `SIM` has rejected the message with an `undefined_security_error`. The replay-protection-counter
|
|
||||||
ensures that a message can only be sent once.
|
|
||||||
|
|
||||||
.. note:: The replay-protection-counter is implemented as a 5 byte integer value (see also ETSI TS 102 225, Table 3).
|
|
||||||
When the counter has reached its maximum, it will not overflow nor can it be reset.
|
|
||||||
|
|
||||||
smpp-ota-tool syntax
|
|
||||||
~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
.. argparse::
|
|
||||||
:module: contrib.smpp-ota-tool
|
|
||||||
:func: option_parser
|
|
||||||
:prog: contrib/smpp-ota-tool.py
|
|
||||||
@@ -55,5 +55,3 @@ And once your external program is sending SMS to the simulated SMSC, it will log
|
|||||||
SMSPPDownload(DeviceIdentities({'source_dev_id': 'network', 'dest_dev_id': 'uicc'}),Address({'ton_npi': 0, 'call_number': '0123456'}),SMS_TPDU({'tpdu': '400290217ff6227052000000002d02700000281516191212b0000127fa28a5bac69d3c5e9df2c7155dfdde449c826b236215566530787b30e8be5d'}))
|
SMSPPDownload(DeviceIdentities({'source_dev_id': 'network', 'dest_dev_id': 'uicc'}),Address({'ton_npi': 0, 'call_number': '0123456'}),SMS_TPDU({'tpdu': '400290217ff6227052000000002d02700000281516191212b0000127fa28a5bac69d3c5e9df2c7155dfdde449c826b236215566530787b30e8be5d'}))
|
||||||
INFO root: ENVELOPE: d147820283818604001032548b3b400290217ff6227052000000002d02700000281516191212b0000127fa28a5bac69d3c5e9df2c7155dfdde449c826b236215566530787b30e8be5d
|
INFO root: ENVELOPE: d147820283818604001032548b3b400290217ff6227052000000002d02700000281516191212b0000127fa28a5bac69d3c5e9df2c7155dfdde449c826b236215566530787b30e8be5d
|
||||||
INFO root: SW 9000: 027100002412b000019a551bb7c28183652de0ace6170d0e563c5e949a3ba56747fe4c1dbbef16642c
|
INFO root: SW 9000: 027100002412b000019a551bb7c28183652de0ace6170d0e563c5e949a3ba56747fe4c1dbbef16642c
|
||||||
|
|
||||||
.. note:: for sending OTA SMS messages :ref:`smpp-ota-tool` may be used.
|
|
||||||
|
|||||||
@@ -44,11 +44,6 @@ from pySim.legacy.ts_51_011 import EF
|
|||||||
from pySim.card_handler import *
|
from pySim.card_handler import *
|
||||||
from pySim.utils import *
|
from pySim.utils import *
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
import logging
|
|
||||||
from pySim.log import PySimLogger
|
|
||||||
|
|
||||||
log = PySimLogger.get(Path(__file__).stem)
|
|
||||||
|
|
||||||
def parse_options():
|
def parse_options():
|
||||||
|
|
||||||
@@ -190,7 +185,6 @@ def parse_options():
|
|||||||
default=False, action="store_true")
|
default=False, action="store_true")
|
||||||
parser.add_argument("--card_handler", dest="card_handler_config", metavar="FILE",
|
parser.add_argument("--card_handler", dest="card_handler_config", metavar="FILE",
|
||||||
help="Use automatic card handling machine")
|
help="Use automatic card handling machine")
|
||||||
parser.add_argument("--verbose", help="Enable verbose logging", action='store_true', default=False)
|
|
||||||
|
|
||||||
options = parser.parse_args()
|
options = parser.parse_args()
|
||||||
|
|
||||||
@@ -776,9 +770,6 @@ if __name__ == '__main__':
|
|||||||
# Parse options
|
# Parse options
|
||||||
opts = parse_options()
|
opts = parse_options()
|
||||||
|
|
||||||
# Setup logger
|
|
||||||
PySimLogger.setup(print, {logging.WARN: "\033[33m"}, opts.verbose)
|
|
||||||
|
|
||||||
# Init card reader driver
|
# Init card reader driver
|
||||||
sl = init_reader(opts)
|
sl = init_reader(opts)
|
||||||
|
|
||||||
|
|||||||
@@ -25,6 +25,7 @@
|
|||||||
import hashlib
|
import hashlib
|
||||||
import argparse
|
import argparse
|
||||||
import os
|
import os
|
||||||
|
import random
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
@@ -45,17 +46,11 @@ from pySim.utils import dec_imsi, dec_iccid
|
|||||||
from pySim.legacy.utils import format_xplmn_w_act, dec_st, dec_msisdn
|
from pySim.legacy.utils import format_xplmn_w_act, dec_st, dec_msisdn
|
||||||
from pySim.ts_51_011 import EF_SMSP
|
from pySim.ts_51_011 import EF_SMSP
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
import logging
|
|
||||||
from pySim.log import PySimLogger
|
|
||||||
|
|
||||||
log = PySimLogger.get(Path(__file__).stem)
|
|
||||||
|
|
||||||
option_parser = argparse.ArgumentParser(description='Legacy tool for reading some parts of a SIM card',
|
option_parser = argparse.ArgumentParser(description='Legacy tool for reading some parts of a SIM card',
|
||||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||||
option_parser.add_argument("--verbose", help="Enable verbose logging", action='store_true', default=False)
|
|
||||||
argparse_add_reader_args(option_parser)
|
argparse_add_reader_args(option_parser)
|
||||||
|
|
||||||
|
|
||||||
def select_app(adf: str, card: SimCard):
|
def select_app(adf: str, card: SimCard):
|
||||||
"""Select application by its AID"""
|
"""Select application by its AID"""
|
||||||
sw = 0
|
sw = 0
|
||||||
@@ -80,9 +75,6 @@ if __name__ == '__main__':
|
|||||||
# Parse options
|
# Parse options
|
||||||
opts = option_parser.parse_args()
|
opts = option_parser.parse_args()
|
||||||
|
|
||||||
# Setup logger
|
|
||||||
PySimLogger.setup(print, {logging.WARN: "\033[33m"}, opts.verbose)
|
|
||||||
|
|
||||||
# Init card reader driver
|
# Init card reader driver
|
||||||
sl = init_reader(opts)
|
sl = init_reader(opts)
|
||||||
|
|
||||||
|
|||||||
@@ -107,12 +107,12 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
kwargs = {'include_ipy': True}
|
kwargs = {'include_ipy': True}
|
||||||
|
|
||||||
self.verbose = verbose
|
self.verbose = verbose
|
||||||
PySimLogger.setup(self.poutput, {logging.WARN: YELLOW})
|
self._onchange_verbose('verbose', False, self.verbose);
|
||||||
self._onchange_verbose('verbose', False, self.verbose)
|
|
||||||
|
|
||||||
# pylint: disable=unexpected-keyword-arg
|
# pylint: disable=unexpected-keyword-arg
|
||||||
super().__init__(persistent_history_file='~/.pysim_shell_history', allow_cli_args=False,
|
super().__init__(persistent_history_file='~/.pysim_shell_history', allow_cli_args=False,
|
||||||
auto_load_commands=False, startup_script=script, **kwargs)
|
auto_load_commands=False, startup_script=script, **kwargs)
|
||||||
|
PySimLogger.setup(self.poutput, {logging.WARN: YELLOW})
|
||||||
self.intro = style(self.BANNER, fg=RED)
|
self.intro = style(self.BANNER, fg=RED)
|
||||||
self.default_category = 'pySim-shell built-in commands'
|
self.default_category = 'pySim-shell built-in commands'
|
||||||
self.card = None
|
self.card = None
|
||||||
@@ -136,7 +136,8 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
self.add_settable(Settable2Compat('apdu_trace', bool, 'Trace and display APDUs exchanged with card', self,
|
self.add_settable(Settable2Compat('apdu_trace', bool, 'Trace and display APDUs exchanged with card', self,
|
||||||
onchange_cb=self._onchange_apdu_trace))
|
onchange_cb=self._onchange_apdu_trace))
|
||||||
self.add_settable(Settable2Compat('apdu_strict', bool,
|
self.add_settable(Settable2Compat('apdu_strict', bool,
|
||||||
'Strictly apply APDU format according to ISO/IEC 7816-3, table 12', self))
|
'Enforce APDU responses according to ISO/IEC 7816-3, table 12', self,
|
||||||
|
onchange_cb=self._onchange_apdu_strict))
|
||||||
self.add_settable(Settable2Compat('verbose', bool,
|
self.add_settable(Settable2Compat('verbose', bool,
|
||||||
'Enable/disable verbose logging', self,
|
'Enable/disable verbose logging', self,
|
||||||
onchange_cb=self._onchange_verbose))
|
onchange_cb=self._onchange_verbose))
|
||||||
@@ -217,6 +218,13 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
else:
|
else:
|
||||||
self.card._scc._tp.apdu_tracer = None
|
self.card._scc._tp.apdu_tracer = None
|
||||||
|
|
||||||
|
def _onchange_apdu_strict(self, param_name, old, new):
|
||||||
|
if self.card:
|
||||||
|
if new == True:
|
||||||
|
self.card._scc._tp.apdu_strict = True
|
||||||
|
else:
|
||||||
|
self.card._scc._tp.apdu_strict = False
|
||||||
|
|
||||||
def _onchange_verbose(self, param_name, old, new):
|
def _onchange_verbose(self, param_name, old, new):
|
||||||
PySimLogger.set_verbose(new)
|
PySimLogger.set_verbose(new)
|
||||||
if new == True:
|
if new == True:
|
||||||
@@ -273,7 +281,7 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
apdu_cmd_parser.add_argument('--expect-sw', help='expect a specified status word', type=str, default=None)
|
apdu_cmd_parser.add_argument('--expect-sw', help='expect a specified status word', type=str, default=None)
|
||||||
apdu_cmd_parser.add_argument('--expect-response-regex', help='match response against regex', type=str, default=None)
|
apdu_cmd_parser.add_argument('--expect-response-regex', help='match response against regex', type=str, default=None)
|
||||||
apdu_cmd_parser.add_argument('--raw', help='Bypass the logical channel (and secure channel)', action='store_true')
|
apdu_cmd_parser.add_argument('--raw', help='Bypass the logical channel (and secure channel)', action='store_true')
|
||||||
apdu_cmd_parser.add_argument('APDU', type=is_hexstr, help='APDU as hex string (see also: ISO/IEC 7816-3, section 12.1')
|
apdu_cmd_parser.add_argument('APDU', type=is_hexstr, help='APDU as hex string')
|
||||||
|
|
||||||
@cmd2.with_argparser(apdu_cmd_parser)
|
@cmd2.with_argparser(apdu_cmd_parser)
|
||||||
def do_apdu(self, opts):
|
def do_apdu(self, opts):
|
||||||
@@ -282,23 +290,14 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
tracked. Depending on the raw APDU sent, pySim-shell may not continue to work as expected if you e.g. select
|
tracked. Depending on the raw APDU sent, pySim-shell may not continue to work as expected if you e.g. select
|
||||||
a different file."""
|
a different file."""
|
||||||
|
|
||||||
if not hasattr(self, 'apdu_strict_warning_displayed') and self.apdu_strict is False:
|
|
||||||
self.poutput("Warning: The default for the setable parameter `apdu_strict` will be changed from")
|
|
||||||
self.poutput(" `False` to `True` in future pySim-shell releases. In case you are using")
|
|
||||||
self.poutput(" the `apdu` command from a script that still mixes APDUs with TPDUs, consider")
|
|
||||||
self.poutput(" fixing or adding a `set apdu_strict false` line at the beginning.")
|
|
||||||
self.apdu_strict_warning_displayed = True;
|
|
||||||
|
|
||||||
# When sending raw APDUs we access the scc object through _scc member of the card object. It should also be
|
# When sending raw APDUs we access the scc object through _scc member of the card object. It should also be
|
||||||
# noted that the apdu command plays an exceptional role since it is the only card accessing command that
|
# noted that the apdu command plays an exceptional role since it is the only card accessing command that
|
||||||
# can be executed without the presence of a runtime state (self.rs) object. However, this also means that
|
# can be executed without the presence of a runtime state (self.rs) object. However, this also means that
|
||||||
# self.lchan is also not present (see method equip).
|
# self.lchan is also not present (see method equip).
|
||||||
self.card._scc._tp.apdu_strict = self.apdu_strict
|
|
||||||
if opts.raw or self.lchan is None:
|
if opts.raw or self.lchan is None:
|
||||||
data, sw = self.card._scc.send_apdu(opts.APDU, apply_lchan = False)
|
data, sw = self.card._scc.send_apdu(opts.APDU, apply_lchan = False)
|
||||||
else:
|
else:
|
||||||
data, sw = self.lchan.scc.send_apdu(opts.APDU, apply_lchan = False)
|
data, sw = self.lchan.scc.send_apdu(opts.APDU, apply_lchan = False)
|
||||||
self.card._scc._tp.apdu_strict = True
|
|
||||||
if data:
|
if data:
|
||||||
self.poutput("SW: %s, RESP: %s" % (sw, data))
|
self.poutput("SW: %s, RESP: %s" % (sw, data))
|
||||||
else:
|
else:
|
||||||
@@ -1176,7 +1175,13 @@ if __name__ == '__main__':
|
|||||||
opts = option_parser.parse_args()
|
opts = option_parser.parse_args()
|
||||||
|
|
||||||
# Ensure that we are able to print formatted warnings from the beginning.
|
# Ensure that we are able to print formatted warnings from the beginning.
|
||||||
PySimLogger.setup(print, {logging.WARN: YELLOW}, opts.verbose)
|
PySimLogger.setup(print, {logging.WARN: YELLOW})
|
||||||
|
if opts.verbose:
|
||||||
|
PySimLogger.set_verbose(True)
|
||||||
|
PySimLogger.set_level(logging.DEBUG)
|
||||||
|
else:
|
||||||
|
PySimLogger.set_verbose(False)
|
||||||
|
PySimLogger.set_level(logging.INFO)
|
||||||
|
|
||||||
# Register csv-file as card data provider, either from specified CSV
|
# Register csv-file as card data provider, either from specified CSV
|
||||||
# or from CSV file in home directory
|
# or from CSV file in home directory
|
||||||
|
|||||||
@@ -72,10 +72,10 @@ class ApduArDO(BER_TLV_IE, tag=0xd0):
|
|||||||
if do[0] == 0x01:
|
if do[0] == 0x01:
|
||||||
self.decoded = {'generic_access_rule': 'always'}
|
self.decoded = {'generic_access_rule': 'always'}
|
||||||
return self.decoded
|
return self.decoded
|
||||||
raise ValueError('Invalid 1-byte generic APDU access rule')
|
return ValueError('Invalid 1-byte generic APDU access rule')
|
||||||
else:
|
else:
|
||||||
if len(do) % 8:
|
if len(do) % 8:
|
||||||
raise ValueError('Invalid non-modulo-8 length of APDU filter: %d' % len(do))
|
return ValueError('Invalid non-modulo-8 length of APDU filter: %d' % len(do))
|
||||||
self.decoded = {'apdu_filter': []}
|
self.decoded = {'apdu_filter': []}
|
||||||
offset = 0
|
offset = 0
|
||||||
while offset < len(do):
|
while offset < len(do):
|
||||||
@@ -90,19 +90,19 @@ class ApduArDO(BER_TLV_IE, tag=0xd0):
|
|||||||
return b'\x00'
|
return b'\x00'
|
||||||
if self.decoded['generic_access_rule'] == 'always':
|
if self.decoded['generic_access_rule'] == 'always':
|
||||||
return b'\x01'
|
return b'\x01'
|
||||||
raise ValueError('Invalid 1-byte generic APDU access rule')
|
return ValueError('Invalid 1-byte generic APDU access rule')
|
||||||
else:
|
else:
|
||||||
if not 'apdu_filter' in self.decoded:
|
if not 'apdu_filter' in self.decoded:
|
||||||
raise ValueError('Invalid APDU AR DO')
|
return ValueError('Invalid APDU AR DO')
|
||||||
filters = self.decoded['apdu_filter']
|
filters = self.decoded['apdu_filter']
|
||||||
res = b''
|
res = b''
|
||||||
for f in filters:
|
for f in filters:
|
||||||
if not 'header' in f or not 'mask' in f:
|
if not 'header' in f or not 'mask' in f:
|
||||||
raise ValueError('APDU filter must contain header and mask')
|
return ValueError('APDU filter must contain header and mask')
|
||||||
header_b = h2b(f['header'])
|
header_b = h2b(f['header'])
|
||||||
mask_b = h2b(f['mask'])
|
mask_b = h2b(f['mask'])
|
||||||
if len(header_b) != 4 or len(mask_b) != 4:
|
if len(header_b) != 4 or len(mask_b) != 4:
|
||||||
raise ValueError('APDU filter header and mask must each be 4 bytes')
|
return ValueError('APDU filter header and mask must each be 4 bytes')
|
||||||
res += header_b + mask_b
|
res += header_b + mask_b
|
||||||
return res
|
return res
|
||||||
|
|
||||||
@@ -269,7 +269,7 @@ class ADF_ARAM(CardADF):
|
|||||||
cmd_do_enc = cmd_do.to_ie()
|
cmd_do_enc = cmd_do.to_ie()
|
||||||
cmd_do_len = len(cmd_do_enc)
|
cmd_do_len = len(cmd_do_enc)
|
||||||
if cmd_do_len > 255:
|
if cmd_do_len > 255:
|
||||||
raise ValueError('DO > 255 bytes not supported yet')
|
return ValueError('DO > 255 bytes not supported yet')
|
||||||
else:
|
else:
|
||||||
cmd_do_enc = b''
|
cmd_do_enc = b''
|
||||||
cmd_do_len = 0
|
cmd_do_len = 0
|
||||||
@@ -361,7 +361,7 @@ class ADF_ARAM(CardADF):
|
|||||||
ar_do_content += [{'apdu_ar_do': {'generic_access_rule': 'always'}}]
|
ar_do_content += [{'apdu_ar_do': {'generic_access_rule': 'always'}}]
|
||||||
elif opts.apdu_filter:
|
elif opts.apdu_filter:
|
||||||
if len(opts.apdu_filter) % 16:
|
if len(opts.apdu_filter) % 16:
|
||||||
raise ValueError(f'Invalid non-modulo-16 length of APDU filter: {len(opts.apdu_filter)}')
|
return ValueError('Invalid non-modulo-16 length of APDU filter: %d' % len(do))
|
||||||
offset = 0
|
offset = 0
|
||||||
apdu_filter = []
|
apdu_filter = []
|
||||||
while offset < len(opts.apdu_filter):
|
while offset < len(opts.apdu_filter):
|
||||||
|
|||||||
@@ -128,10 +128,10 @@ class EF_AD(TransparentEF):
|
|||||||
cell_test = 0x04
|
cell_test = 0x04
|
||||||
|
|
||||||
def __init__(self, fid='6f43', sfid=None, name='EF.AD',
|
def __init__(self, fid='6f43', sfid=None, name='EF.AD',
|
||||||
desc='Administrative Data', size=(3, None), **kwargs):
|
desc='Service Provider Name', size=(3, None), **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, **kwargs)
|
||||||
self._construct = Struct(
|
self._construct = Struct(
|
||||||
# Byte 1: MS operation mode
|
# Byte 1: Display Condition
|
||||||
'ms_operation_mode'/Enum(Byte, self.OP_MODE),
|
'ms_operation_mode'/Enum(Byte, self.OP_MODE),
|
||||||
# Bytes 2-3: Additional information
|
# Bytes 2-3: Additional information
|
||||||
'additional_info'/Bytes(2),
|
'additional_info'/Bytes(2),
|
||||||
|
|||||||
@@ -16,6 +16,12 @@
|
|||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
|
from klein import Klein
|
||||||
|
from twisted.internet import defer, protocol, ssl, task, endpoints, reactor
|
||||||
|
from twisted.internet.posixbase import PosixReactorBase
|
||||||
|
from pathlib import Path
|
||||||
|
from twisted.web.server import Site, Request
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import time
|
import time
|
||||||
@@ -123,10 +129,12 @@ class Es2PlusApiFunction(JsonHttpApiFunction):
|
|||||||
class DownloadOrder(Es2PlusApiFunction):
|
class DownloadOrder(Es2PlusApiFunction):
|
||||||
path = '/gsma/rsp2/es2plus/downloadOrder'
|
path = '/gsma/rsp2/es2plus/downloadOrder'
|
||||||
input_params = {
|
input_params = {
|
||||||
|
'header': JsonRequestHeader,
|
||||||
'eid': param.Eid,
|
'eid': param.Eid,
|
||||||
'iccid': param.Iccid,
|
'iccid': param.Iccid,
|
||||||
'profileType': param.ProfileType
|
'profileType': param.ProfileType
|
||||||
}
|
}
|
||||||
|
input_mandatory = ['header']
|
||||||
output_params = {
|
output_params = {
|
||||||
'header': JsonResponseHeader,
|
'header': JsonResponseHeader,
|
||||||
'iccid': param.Iccid,
|
'iccid': param.Iccid,
|
||||||
@@ -137,6 +145,7 @@ class DownloadOrder(Es2PlusApiFunction):
|
|||||||
class ConfirmOrder(Es2PlusApiFunction):
|
class ConfirmOrder(Es2PlusApiFunction):
|
||||||
path = '/gsma/rsp2/es2plus/confirmOrder'
|
path = '/gsma/rsp2/es2plus/confirmOrder'
|
||||||
input_params = {
|
input_params = {
|
||||||
|
'header': JsonRequestHeader,
|
||||||
'iccid': param.Iccid,
|
'iccid': param.Iccid,
|
||||||
'eid': param.Eid,
|
'eid': param.Eid,
|
||||||
'matchingId': param.MatchingId,
|
'matchingId': param.MatchingId,
|
||||||
@@ -144,7 +153,7 @@ class ConfirmOrder(Es2PlusApiFunction):
|
|||||||
'smdsAddress': param.SmdsAddress,
|
'smdsAddress': param.SmdsAddress,
|
||||||
'releaseFlag': param.ReleaseFlag,
|
'releaseFlag': param.ReleaseFlag,
|
||||||
}
|
}
|
||||||
input_mandatory = ['iccid', 'releaseFlag']
|
input_mandatory = ['header', 'iccid', 'releaseFlag']
|
||||||
output_params = {
|
output_params = {
|
||||||
'header': JsonResponseHeader,
|
'header': JsonResponseHeader,
|
||||||
'eid': param.Eid,
|
'eid': param.Eid,
|
||||||
@@ -157,12 +166,13 @@ class ConfirmOrder(Es2PlusApiFunction):
|
|||||||
class CancelOrder(Es2PlusApiFunction):
|
class CancelOrder(Es2PlusApiFunction):
|
||||||
path = '/gsma/rsp2/es2plus/cancelOrder'
|
path = '/gsma/rsp2/es2plus/cancelOrder'
|
||||||
input_params = {
|
input_params = {
|
||||||
|
'header': JsonRequestHeader,
|
||||||
'iccid': param.Iccid,
|
'iccid': param.Iccid,
|
||||||
'eid': param.Eid,
|
'eid': param.Eid,
|
||||||
'matchingId': param.MatchingId,
|
'matchingId': param.MatchingId,
|
||||||
'finalProfileStatusIndicator': param.FinalProfileStatusIndicator,
|
'finalProfileStatusIndicator': param.FinalProfileStatusIndicator,
|
||||||
}
|
}
|
||||||
input_mandatory = ['finalProfileStatusIndicator', 'iccid']
|
input_mandatory = ['header', 'finalProfileStatusIndicator', 'iccid']
|
||||||
output_params = {
|
output_params = {
|
||||||
'header': JsonResponseHeader,
|
'header': JsonResponseHeader,
|
||||||
}
|
}
|
||||||
@@ -172,9 +182,10 @@ class CancelOrder(Es2PlusApiFunction):
|
|||||||
class ReleaseProfile(Es2PlusApiFunction):
|
class ReleaseProfile(Es2PlusApiFunction):
|
||||||
path = '/gsma/rsp2/es2plus/releaseProfile'
|
path = '/gsma/rsp2/es2plus/releaseProfile'
|
||||||
input_params = {
|
input_params = {
|
||||||
|
'header': JsonRequestHeader,
|
||||||
'iccid': param.Iccid,
|
'iccid': param.Iccid,
|
||||||
}
|
}
|
||||||
input_mandatory = ['iccid']
|
input_mandatory = ['header', 'iccid']
|
||||||
output_params = {
|
output_params = {
|
||||||
'header': JsonResponseHeader,
|
'header': JsonResponseHeader,
|
||||||
}
|
}
|
||||||
@@ -184,6 +195,7 @@ class ReleaseProfile(Es2PlusApiFunction):
|
|||||||
class HandleDownloadProgressInfo(Es2PlusApiFunction):
|
class HandleDownloadProgressInfo(Es2PlusApiFunction):
|
||||||
path = '/gsma/rsp2/es2plus/handleDownloadProgressInfo'
|
path = '/gsma/rsp2/es2plus/handleDownloadProgressInfo'
|
||||||
input_params = {
|
input_params = {
|
||||||
|
'header': JsonRequestHeader,
|
||||||
'eid': param.Eid,
|
'eid': param.Eid,
|
||||||
'iccid': param.Iccid,
|
'iccid': param.Iccid,
|
||||||
'profileType': param.ProfileType,
|
'profileType': param.ProfileType,
|
||||||
@@ -192,10 +204,9 @@ class HandleDownloadProgressInfo(Es2PlusApiFunction):
|
|||||||
'notificationPointStatus': param.NotificationPointStatus,
|
'notificationPointStatus': param.NotificationPointStatus,
|
||||||
'resultData': param.ResultData,
|
'resultData': param.ResultData,
|
||||||
}
|
}
|
||||||
input_mandatory = ['iccid', 'profileType', 'timestamp', 'notificationPointId', 'notificationPointStatus']
|
input_mandatory = ['header', 'iccid', 'profileType', 'timestamp', 'notificationPointId', 'notificationPointStatus']
|
||||||
expected_http_status = 204
|
expected_http_status = 204
|
||||||
|
|
||||||
|
|
||||||
class Es2pApiClient:
|
class Es2pApiClient:
|
||||||
"""Main class representing a full ES2+ API client. Has one method for each API function."""
|
"""Main class representing a full ES2+ API client. Has one method for each API function."""
|
||||||
def __init__(self, url_prefix:str, func_req_id:str, server_cert_verify: str = None, client_cert: str = None):
|
def __init__(self, url_prefix:str, func_req_id:str, server_cert_verify: str = None, client_cert: str = None):
|
||||||
@@ -206,18 +217,17 @@ class Es2pApiClient:
|
|||||||
if client_cert:
|
if client_cert:
|
||||||
self.session.cert = client_cert
|
self.session.cert = client_cert
|
||||||
|
|
||||||
self.downloadOrder = DownloadOrder(url_prefix, func_req_id, self.session)
|
self.downloadOrder = JsonHttpApiClient(DownloadOrder(), url_prefix, func_req_id, self.session)
|
||||||
self.confirmOrder = ConfirmOrder(url_prefix, func_req_id, self.session)
|
self.confirmOrder = JsonHttpApiClient(ConfirmOrder(), url_prefix, func_req_id, self.session)
|
||||||
self.cancelOrder = CancelOrder(url_prefix, func_req_id, self.session)
|
self.cancelOrder = JsonHttpApiClient(CancelOrder(), url_prefix, func_req_id, self.session)
|
||||||
self.releaseProfile = ReleaseProfile(url_prefix, func_req_id, self.session)
|
self.releaseProfile = JsonHttpApiClient(ReleaseProfile(), url_prefix, func_req_id, self.session)
|
||||||
self.handleDownloadProgressInfo = HandleDownloadProgressInfo(url_prefix, func_req_id, self.session)
|
self.handleDownloadProgressInfo = JsonHttpApiClient(HandleDownloadProgressInfo(), url_prefix, func_req_id, self.session)
|
||||||
|
|
||||||
def _gen_func_id(self) -> str:
|
def _gen_func_id(self) -> str:
|
||||||
"""Generate the next function call id."""
|
"""Generate the next function call id."""
|
||||||
self.func_id += 1
|
self.func_id += 1
|
||||||
return 'FCI-%u-%u' % (time.time(), self.func_id)
|
return 'FCI-%u-%u' % (time.time(), self.func_id)
|
||||||
|
|
||||||
|
|
||||||
def call_downloadOrder(self, data: dict) -> dict:
|
def call_downloadOrder(self, data: dict) -> dict:
|
||||||
"""Perform ES2+ DownloadOrder function (SGP.22 section 5.3.1)."""
|
"""Perform ES2+ DownloadOrder function (SGP.22 section 5.3.1)."""
|
||||||
return self.downloadOrder.call(data, self._gen_func_id())
|
return self.downloadOrder.call(data, self._gen_func_id())
|
||||||
@@ -237,3 +247,116 @@ class Es2pApiClient:
|
|||||||
def call_handleDownloadProgressInfo(self, data: dict) -> dict:
|
def call_handleDownloadProgressInfo(self, data: dict) -> dict:
|
||||||
"""Perform ES2+ HandleDownloadProgressInfo function (SGP.22 section 5.3.5)."""
|
"""Perform ES2+ HandleDownloadProgressInfo function (SGP.22 section 5.3.5)."""
|
||||||
return self.handleDownloadProgressInfo.call(data, self._gen_func_id())
|
return self.handleDownloadProgressInfo.call(data, self._gen_func_id())
|
||||||
|
|
||||||
|
class Es2pApiServerHandlerSmdpp(abc.ABC):
|
||||||
|
"""ES2+ (SMDP+ side) API Server handler class. The API user is expected to override the contained methods."""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def call_downloadOrder(self, data: dict) -> (dict, str):
|
||||||
|
"""Perform ES2+ DownloadOrder function (SGP.22 section 5.3.1)."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def call_confirmOrder(self, data: dict) -> (dict, str):
|
||||||
|
"""Perform ES2+ ConfirmOrder function (SGP.22 section 5.3.2)."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def call_cancelOrder(self, data: dict) -> (dict, str):
|
||||||
|
"""Perform ES2+ CancelOrder function (SGP.22 section 5.3.3)."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def call_releaseProfile(self, data: dict) -> (dict, str):
|
||||||
|
"""Perform ES2+ CancelOrder function (SGP.22 section 5.3.4)."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Es2pApiServerHandlerMno(abc.ABC):
|
||||||
|
"""ES2+ (MNO side) API Server handler class. The API user is expected to override the contained methods."""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def call_handleDownloadProgressInfo(self, data: dict) -> (dict, str):
|
||||||
|
"""Perform ES2+ HandleDownloadProgressInfo function (SGP.22 section 5.3.5)."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Es2pApiServer(abc.ABC):
|
||||||
|
"""Main class representing a full ES2+ API server. Has one method for each API function."""
|
||||||
|
app = None
|
||||||
|
|
||||||
|
def __init__(self, port: int, interface: str, server_cert: str = None, client_cert_verify: str = None):
|
||||||
|
logger.debug("HTTP SRV: starting ES2+ API server on %s:%s" % (interface, port))
|
||||||
|
self.port = port
|
||||||
|
self.interface = interface
|
||||||
|
if server_cert:
|
||||||
|
self.server_cert = ssl.PrivateCertificate.loadPEM(Path(server_cert).read_text())
|
||||||
|
else:
|
||||||
|
self.server_cert = None
|
||||||
|
if client_cert_verify:
|
||||||
|
self.client_cert_verify = ssl.Certificate.loadPEM(Path(client_cert_verify).read_text())
|
||||||
|
else:
|
||||||
|
self.client_cert_verify = None
|
||||||
|
|
||||||
|
def reactor(self, reactor: PosixReactorBase):
|
||||||
|
logger.debug("HTTP SRV: listen on %s:%s" % (self.interface, self.port))
|
||||||
|
if self.server_cert:
|
||||||
|
if self.client_cert_verify:
|
||||||
|
reactor.listenSSL(self.port, Site(self.app.resource()), self.server_cert.options(self.client_cert_verify),
|
||||||
|
interface=self.interface)
|
||||||
|
else:
|
||||||
|
reactor.listenSSL(self.port, Site(self.app.resource()), self.server_cert.options(),
|
||||||
|
interface=self.interface)
|
||||||
|
else:
|
||||||
|
reactor.listenTCP(self.port, Site(self.app.resource()), interface=self.interface)
|
||||||
|
return defer.Deferred()
|
||||||
|
|
||||||
|
class Es2pApiServerSmdpp(Es2pApiServer):
|
||||||
|
"""ES2+ (SMDP+ side) API Server."""
|
||||||
|
app = Klein()
|
||||||
|
|
||||||
|
def __init__(self, port: int, interface: str, handler: Es2pApiServerHandlerSmdpp,
|
||||||
|
server_cert: str = None, client_cert_verify: str = None):
|
||||||
|
super().__init__(port, interface, server_cert, client_cert_verify)
|
||||||
|
self.handler = handler
|
||||||
|
self.downloadOrder = JsonHttpApiServer(DownloadOrder(), handler.call_downloadOrder)
|
||||||
|
self.confirmOrder = JsonHttpApiServer(ConfirmOrder(), handler.call_confirmOrder)
|
||||||
|
self.cancelOrder = JsonHttpApiServer(CancelOrder(), handler.call_cancelOrder)
|
||||||
|
self.releaseProfile = JsonHttpApiServer(ReleaseProfile(), handler.call_releaseProfile)
|
||||||
|
task.react(self.reactor)
|
||||||
|
|
||||||
|
@app.route(DownloadOrder.path)
|
||||||
|
def call_downloadOrder(self, request: Request) -> dict:
|
||||||
|
"""Perform ES2+ DownloadOrder function (SGP.22 section 5.3.1)."""
|
||||||
|
return self.downloadOrder.call(request)
|
||||||
|
|
||||||
|
@app.route(ConfirmOrder.path)
|
||||||
|
def call_confirmOrder(self, request: Request) -> dict:
|
||||||
|
"""Perform ES2+ ConfirmOrder function (SGP.22 section 5.3.2)."""
|
||||||
|
return self.confirmOrder.call(request)
|
||||||
|
|
||||||
|
@app.route(CancelOrder.path)
|
||||||
|
def call_cancelOrder(self, request: Request) -> dict:
|
||||||
|
"""Perform ES2+ CancelOrder function (SGP.22 section 5.3.3)."""
|
||||||
|
return self.cancelOrder.call(request)
|
||||||
|
|
||||||
|
@app.route(ReleaseProfile.path)
|
||||||
|
def call_releaseProfile(self, request: Request) -> dict:
|
||||||
|
"""Perform ES2+ CancelOrder function (SGP.22 section 5.3.4)."""
|
||||||
|
return self.releaseProfile.call(request)
|
||||||
|
|
||||||
|
class Es2pApiServerMno(Es2pApiServer):
|
||||||
|
"""ES2+ (MNO side) API Server."""
|
||||||
|
|
||||||
|
app = Klein()
|
||||||
|
|
||||||
|
def __init__(self, port: int, interface: str, handler: Es2pApiServerHandlerMno,
|
||||||
|
server_cert: str = None, client_cert_verify: str = None):
|
||||||
|
super().__init__(port, interface, server_cert, client_cert_verify)
|
||||||
|
self.handler = handler
|
||||||
|
self.handleDownloadProgressInfo = JsonHttpApiServer(HandleDownloadProgressInfo(),
|
||||||
|
handler.call_handleDownloadProgressInfo)
|
||||||
|
task.react(self.reactor)
|
||||||
|
|
||||||
|
@app.route(HandleDownloadProgressInfo.path)
|
||||||
|
def call_handleDownloadProgressInfo(self, request: Request) -> dict:
|
||||||
|
"""Perform ES2+ HandleDownloadProgressInfo function (SGP.22 section 5.3.5)."""
|
||||||
|
return self.handleDownloadProgressInfo.call(request)
|
||||||
|
|||||||
@@ -155,11 +155,11 @@ class Es9pApiClient:
|
|||||||
if server_cert_verify:
|
if server_cert_verify:
|
||||||
self.session.verify = server_cert_verify
|
self.session.verify = server_cert_verify
|
||||||
|
|
||||||
self.initiateAuthentication = InitiateAuthentication(url_prefix, '', self.session)
|
self.initiateAuthentication = JsonHttpApiClient(InitiateAuthentication(), url_prefix, '', self.session)
|
||||||
self.authenticateClient = AuthenticateClient(url_prefix, '', self.session)
|
self.authenticateClient = JsonHttpApiClient(AuthenticateClient(), url_prefix, '', self.session)
|
||||||
self.getBoundProfilePackage = GetBoundProfilePackage(url_prefix, '', self.session)
|
self.getBoundProfilePackage = JsonHttpApiClient(GetBoundProfilePackage(), url_prefix, '', self.session)
|
||||||
self.handleNotification = HandleNotification(url_prefix, '', self.session)
|
self.handleNotification = JsonHttpApiClient(HandleNotification(), url_prefix, '', self.session)
|
||||||
self.cancelSession = CancelSession(url_prefix, '', self.session)
|
self.cancelSession = JsonHttpApiClient(CancelSession(), url_prefix, '', self.session)
|
||||||
|
|
||||||
def call_initiateAuthentication(self, data: dict) -> dict:
|
def call_initiateAuthentication(self, data: dict) -> dict:
|
||||||
return self.initiateAuthentication.call(data)
|
return self.initiateAuthentication.call(data)
|
||||||
|
|||||||
@@ -21,6 +21,8 @@ import logging
|
|||||||
import json
|
import json
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
import base64
|
import base64
|
||||||
|
from twisted.web.server import Request
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
logger.setLevel(logging.DEBUG)
|
logger.setLevel(logging.DEBUG)
|
||||||
@@ -131,6 +133,16 @@ class JsonResponseHeader(ApiParam):
|
|||||||
if status not in ['Executed-Success', 'Executed-WithWarning', 'Failed', 'Expired']:
|
if status not in ['Executed-Success', 'Executed-WithWarning', 'Failed', 'Expired']:
|
||||||
raise ValueError('Unknown/unspecified status "%s"' % status)
|
raise ValueError('Unknown/unspecified status "%s"' % status)
|
||||||
|
|
||||||
|
class JsonRequestHeader(ApiParam):
|
||||||
|
"""SGP.22 section 6.5.1.3."""
|
||||||
|
@classmethod
|
||||||
|
def verify_decoded(cls, data):
|
||||||
|
func_req_id = data.get('functionRequesterIdentifier')
|
||||||
|
if not func_req_id:
|
||||||
|
raise ValueError('Missing mandatory functionRequesterIdentifier in header')
|
||||||
|
func_call_id = data.get('functionCallIdentifier')
|
||||||
|
if not func_call_id:
|
||||||
|
raise ValueError('Missing mandatory functionCallIdentifier in header')
|
||||||
|
|
||||||
class HttpStatusError(Exception):
|
class HttpStatusError(Exception):
|
||||||
pass
|
pass
|
||||||
@@ -161,65 +173,118 @@ class ApiError(Exception):
|
|||||||
|
|
||||||
class JsonHttpApiFunction(abc.ABC):
|
class JsonHttpApiFunction(abc.ABC):
|
||||||
"""Base class for representing an HTTP[s] API Function."""
|
"""Base class for representing an HTTP[s] API Function."""
|
||||||
# the below class variables are expected to be overridden in derived classes
|
# The below class variables are used to describe the properties of the API function. Derived classes are expected
|
||||||
|
# to orverride those class properties with useful values. The prefixes "input_" and "output_" refer to the API
|
||||||
|
# function from an abstract point of view. Seen from the client perspective, "input_" will refer to parameters the
|
||||||
|
# client sends to a HTTP server. Seen from the server perspective, "input_" will refer to parameters the server
|
||||||
|
# receives from the a requesting client. The same applies vice versa to class variables that have an "output_"
|
||||||
|
# prefix.
|
||||||
|
|
||||||
|
# path of the API function (e.g. '/gsma/rsp2/es2plus/confirmOrder')
|
||||||
path = None
|
path = None
|
||||||
|
|
||||||
# dictionary of input parameters. key is parameter name, value is ApiParam class
|
# dictionary of input parameters. key is parameter name, value is ApiParam class
|
||||||
input_params = {}
|
input_params = {}
|
||||||
|
|
||||||
# list of mandatory input parameters
|
# list of mandatory input parameters
|
||||||
input_mandatory = []
|
input_mandatory = []
|
||||||
|
|
||||||
# dictionary of output parameters. key is parameter name, value is ApiParam class
|
# dictionary of output parameters. key is parameter name, value is ApiParam class
|
||||||
output_params = {}
|
output_params = {}
|
||||||
|
|
||||||
# list of mandatory output parameters (for successful response)
|
# list of mandatory output parameters (for successful response)
|
||||||
output_mandatory = []
|
output_mandatory = []
|
||||||
|
|
||||||
|
# list of mandatory output parameters (for failed response)
|
||||||
|
output_mandatory_failed = []
|
||||||
|
|
||||||
# expected HTTP status code of the response
|
# expected HTTP status code of the response
|
||||||
expected_http_status = 200
|
expected_http_status = 200
|
||||||
|
|
||||||
# the HTTP method used (GET, OPTIONS, HEAD, POST, PUT, PATCH or DELETE)
|
# the HTTP method used (GET, OPTIONS, HEAD, POST, PUT, PATCH or DELETE)
|
||||||
http_method = 'POST'
|
http_method = 'POST'
|
||||||
|
|
||||||
|
# additional custom HTTP headers (client requests)
|
||||||
extra_http_req_headers = {}
|
extra_http_req_headers = {}
|
||||||
|
|
||||||
def __init__(self, url_prefix: str, func_req_id: Optional[str], session: requests.Session):
|
# additional custom HTTP headers (server responses)
|
||||||
self.url_prefix = url_prefix
|
extra_http_res_headers = {}
|
||||||
self.func_req_id = func_req_id
|
|
||||||
self.session = session
|
|
||||||
|
|
||||||
def encode(self, data: dict, func_call_id: Optional[str] = None) -> dict:
|
def __new__(cls, *args, role = 'legacy_client', **kwargs):
|
||||||
|
"""
|
||||||
|
Args:
|
||||||
|
args: (see JsonHttpApiClient and JsonHttpApiServer)
|
||||||
|
role: role ('server' or 'client') in which the JsonHttpApiFunction should be created.
|
||||||
|
kwargs: (see JsonHttpApiClient and JsonHttpApiServer)
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Create a dictionary with the class attributes of this class (the properties listed above and the encode_
|
||||||
|
# decode_ methods below). The dictionary will not include any dunder/magic methods
|
||||||
|
cls_attr = {attr_name: getattr(cls, attr_name) for attr_name in dir(cls) if not attr_name.startswith('__')}
|
||||||
|
|
||||||
|
# Normal instantiation as JsonHttpApiFunction:
|
||||||
|
if len(args) == 0 and len(kwargs) == 0:
|
||||||
|
return type(cls.__name__, (abc.ABC,), cls_attr)()
|
||||||
|
|
||||||
|
# Instantiation as as JsonHttpApiFunction with a JsonHttpApiClient or JsonHttpApiServer base
|
||||||
|
if role == 'legacy_client':
|
||||||
|
# Deprecated: With the advent of the server role (JsonHttpApiServer) the API had to be changed. To maintain
|
||||||
|
# compatibility with existing code (out-of-tree) the original behaviour and API interface and behaviour had
|
||||||
|
# to be preserved. Already existing JsonHttpApiFunction definitions will still work and the related objects
|
||||||
|
# may still be created on the original way: my_api_func = MyApiFunc(url_prefix, func_req_id, self.session)
|
||||||
|
logger.warning('implicit role (falling back to legacy JsonHttpApiClient) is deprecated, please specify role explcitly')
|
||||||
|
result = type(cls.__name__, (JsonHttpApiClient,), cls_attr)(None, *args, **kwargs)
|
||||||
|
result.api_func = result
|
||||||
|
result.legacy = True
|
||||||
|
return result
|
||||||
|
elif role == 'client':
|
||||||
|
# Create a JsonHttpApiFunction in client role
|
||||||
|
# Example: my_api_func = MyApiFunc(url_prefix, func_req_id, self.session, role='client')
|
||||||
|
result = type(cls.__name__, (JsonHttpApiClient,), cls_attr)(None, *args, **kwargs)
|
||||||
|
result.api_func = result
|
||||||
|
return result
|
||||||
|
elif role == 'server':
|
||||||
|
# Create a JsonHttpApiFunction in server role
|
||||||
|
# Example: my_api_func = MyApiFunc(url_prefix, func_req_id, self.session, role='server')
|
||||||
|
result = type(cls.__name__, (JsonHttpApiServer,), cls_attr)(None, *args, **kwargs)
|
||||||
|
result.api_func = result
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
raise ValueError('Invalid role \'%s\' specified' % role)
|
||||||
|
|
||||||
|
def encode_client(self, data: dict) -> dict:
|
||||||
"""Validate an encode input dict into JSON-serializable dict for request body."""
|
"""Validate an encode input dict into JSON-serializable dict for request body."""
|
||||||
output = {}
|
output = {}
|
||||||
if func_call_id:
|
|
||||||
output['header'] = {
|
|
||||||
'functionRequesterIdentifier': self.func_req_id,
|
|
||||||
'functionCallIdentifier': func_call_id
|
|
||||||
}
|
|
||||||
|
|
||||||
for p in self.input_mandatory:
|
for p in self.input_mandatory:
|
||||||
if not p in data:
|
if not p in data:
|
||||||
raise ValueError('Mandatory input parameter %s missing' % p)
|
raise ValueError('Mandatory input parameter %s missing' % p)
|
||||||
for p, v in data.items():
|
for p, v in data.items():
|
||||||
p_class = self.input_params.get(p)
|
p_class = self.input_params.get(p)
|
||||||
if not p_class:
|
if not p_class:
|
||||||
logger.warning('Unexpected/unsupported input parameter %s=%s', p, v)
|
# pySim/esim/http_json_api.py:269:47: E1101: Instance of 'JsonHttpApiFunction' has no 'legacy' member (no-member)
|
||||||
output[p] = v
|
# pylint: disable=no-member
|
||||||
|
if hasattr(self, 'legacy') and self.legacy:
|
||||||
|
output[p] = JsonRequestHeader.encode(v)
|
||||||
|
else:
|
||||||
|
logger.warning('Unexpected/unsupported input parameter %s=%s', p, v)
|
||||||
|
output[p] = v
|
||||||
else:
|
else:
|
||||||
output[p] = p_class.encode(v)
|
output[p] = p_class.encode(v)
|
||||||
return output
|
return output
|
||||||
|
|
||||||
def decode(self, data: dict) -> dict:
|
def decode_client(self, data: dict) -> dict:
|
||||||
"""[further] Decode and validate the JSON-Dict of the response body."""
|
"""[further] Decode and validate the JSON-Dict of the response body."""
|
||||||
output = {}
|
output = {}
|
||||||
if 'header' in self.output_params:
|
output_mandatory = self.output_mandatory
|
||||||
# let's first do the header, it's special
|
|
||||||
if not 'header' in data:
|
|
||||||
raise ValueError('Mandatory output parameter "header" missing')
|
|
||||||
hdr_class = self.output_params.get('header')
|
|
||||||
output['header'] = hdr_class.decode(data['header'])
|
|
||||||
|
|
||||||
if output['header']['functionExecutionStatus']['status'] not in ['Executed-Success','Executed-WithWarning']:
|
# In case a provided header (may be optional) indicates that the API function call was unsuccessful, a
|
||||||
raise ApiError(output['header']['functionExecutionStatus'])
|
# different set of mandatory parameters applies.
|
||||||
# we can only expect mandatory parameters to be present in case of successful execution
|
header = data.get('header')
|
||||||
for p in self.output_mandatory:
|
if header:
|
||||||
if p == 'header':
|
if data['header']['functionExecutionStatus']['status'] not in ['Executed-Success','Executed-WithWarning']:
|
||||||
continue
|
output_mandatory = self.output_mandatory_failed
|
||||||
|
|
||||||
|
for p in output_mandatory:
|
||||||
if not p in data:
|
if not p in data:
|
||||||
raise ValueError('Mandatory output parameter "%s" missing' % p)
|
raise ValueError('Mandatory output parameter "%s" missing' % p)
|
||||||
for p, v in data.items():
|
for p, v in data.items():
|
||||||
@@ -231,35 +296,171 @@ class JsonHttpApiFunction(abc.ABC):
|
|||||||
output[p] = p_class.decode(v)
|
output[p] = p_class.decode(v)
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
def encode_server(self, data: dict) -> dict:
|
||||||
|
"""Validate an encode input dict into JSON-serializable dict for response body."""
|
||||||
|
output = {}
|
||||||
|
output_mandatory = self.output_mandatory
|
||||||
|
|
||||||
|
# In case a provided header (may be optional) indicates that the API function call was unsuccessful, a
|
||||||
|
# different set of mandatory parameters applies.
|
||||||
|
header = data.get('header')
|
||||||
|
if header:
|
||||||
|
if data['header']['functionExecutionStatus']['status'] not in ['Executed-Success','Executed-WithWarning']:
|
||||||
|
output_mandatory = self.output_mandatory_failed
|
||||||
|
|
||||||
|
for p in output_mandatory:
|
||||||
|
if not p in data:
|
||||||
|
raise ValueError('Mandatory output parameter %s missing' % p)
|
||||||
|
for p, v in data.items():
|
||||||
|
p_class = self.output_params.get(p)
|
||||||
|
if not p_class:
|
||||||
|
logger.warning('Unexpected/unsupported output parameter %s=%s', p, v)
|
||||||
|
output[p] = v
|
||||||
|
else:
|
||||||
|
output[p] = p_class.encode(v)
|
||||||
|
return output
|
||||||
|
|
||||||
|
def decode_server(self, data: dict) -> dict:
|
||||||
|
"""[further] Decode and validate the JSON-Dict of the request body."""
|
||||||
|
output = {}
|
||||||
|
|
||||||
|
for p in self.input_mandatory:
|
||||||
|
if not p in data:
|
||||||
|
raise ValueError('Mandatory input parameter "%s" missing' % p)
|
||||||
|
for p, v in data.items():
|
||||||
|
p_class = self.input_params.get(p)
|
||||||
|
if not p_class:
|
||||||
|
logger.warning('Unexpected/unsupported input parameter "%s"="%s"', p, v)
|
||||||
|
output[p] = v
|
||||||
|
else:
|
||||||
|
output[p] = p_class.decode(v)
|
||||||
|
return output
|
||||||
|
|
||||||
|
class JsonHttpApiClient():
|
||||||
|
def __init__(self, api_func: JsonHttpApiFunction, url_prefix: str, func_req_id: Optional[str],
|
||||||
|
session: requests.Session):
|
||||||
|
"""
|
||||||
|
Args:
|
||||||
|
api_func : API function definition (JsonHttpApiFunction)
|
||||||
|
url_prefix : prefix to be put in front of the API function path (see JsonHttpApiFunction)
|
||||||
|
func_req_id : function requestor id to use for requests
|
||||||
|
session : session object (requests)
|
||||||
|
"""
|
||||||
|
self.api_func = api_func
|
||||||
|
self.url_prefix = url_prefix
|
||||||
|
self.func_req_id = func_req_id
|
||||||
|
self.session = session
|
||||||
|
|
||||||
def call(self, data: dict, func_call_id: Optional[str] = None, timeout=10) -> Optional[dict]:
|
def call(self, data: dict, func_call_id: Optional[str] = None, timeout=10) -> Optional[dict]:
|
||||||
"""Make an API call to the HTTP API endpoint represented by this object.
|
"""Make an API call to the HTTP API endpoint represented by this object. Input data is passed in `data` as
|
||||||
Input data is passed in `data` as json-serializable dict. Output data
|
json-serializable dict. Output data is returned as json-deserialized dict."""
|
||||||
is returned as json-deserialized dict."""
|
|
||||||
url = self.url_prefix + self.path
|
# In case a function caller ID is supplied, use it together with the stored function requestor ID to generate
|
||||||
encoded = json.dumps(self.encode(data, func_call_id))
|
# and prepend the header field according to SGP.22, section 6.5.1.1 and 6.5.1.3. (the presence of the header
|
||||||
|
# field is checked by the encode_client method)
|
||||||
|
if func_call_id:
|
||||||
|
data = {'header' : {'functionRequesterIdentifier': self.func_req_id,
|
||||||
|
'functionCallIdentifier': func_call_id}} | data
|
||||||
|
|
||||||
|
# Encode the message (the presence of mandatory fields is checked during encoding)
|
||||||
|
encoded = json.dumps(self.api_func.encode_client(data))
|
||||||
|
|
||||||
|
# Apply HTTP request headers according to SGP.22, section 6.5.1
|
||||||
req_headers = {
|
req_headers = {
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
'X-Admin-Protocol': 'gsma/rsp/v2.5.0',
|
'X-Admin-Protocol': 'gsma/rsp/v2.5.0',
|
||||||
}
|
}
|
||||||
req_headers.update(self.extra_http_req_headers)
|
req_headers.update(self.api_func.extra_http_req_headers)
|
||||||
|
|
||||||
|
# Perform HTTP request
|
||||||
|
url = self.url_prefix + self.api_func.path
|
||||||
logger.debug("HTTP REQ %s - hdr: %s '%s'" % (url, req_headers, encoded))
|
logger.debug("HTTP REQ %s - hdr: %s '%s'" % (url, req_headers, encoded))
|
||||||
response = self.session.request(self.http_method, url, data=encoded, headers=req_headers, timeout=timeout)
|
response = self.session.request(self.api_func.http_method, url, data=encoded, headers=req_headers, timeout=timeout)
|
||||||
logger.debug("HTTP RSP-STS: [%u] hdr: %s" % (response.status_code, response.headers))
|
logger.debug("HTTP RSP-STS: [%u] hdr: %s" % (response.status_code, response.headers))
|
||||||
logger.debug("HTTP RSP: %s" % (response.content))
|
logger.debug("HTTP RSP: %s" % (response.content))
|
||||||
|
|
||||||
if response.status_code != self.expected_http_status:
|
# Check HTTP response status code and make sure that the returned HTTP headers look plausible (according to
|
||||||
|
# SGP.22, section 6.5.1)
|
||||||
|
if response.status_code != self.api_func.expected_http_status:
|
||||||
raise HttpStatusError(response)
|
raise HttpStatusError(response)
|
||||||
if not response.headers.get('Content-Type').startswith(req_headers['Content-Type']):
|
|
||||||
raise HttpHeaderError(response)
|
|
||||||
if not response.headers.get('X-Admin-Protocol', 'gsma/rsp/v2.unknown').startswith('gsma/rsp/v2.'):
|
if not response.headers.get('X-Admin-Protocol', 'gsma/rsp/v2.unknown').startswith('gsma/rsp/v2.'):
|
||||||
raise HttpHeaderError(response)
|
raise HttpHeaderError(response)
|
||||||
|
|
||||||
|
# Decode response and return the result back to the caller
|
||||||
if response.content:
|
if response.content:
|
||||||
if response.headers.get('Content-Type').startswith('application/json'):
|
if response.headers.get('Content-Type').startswith('application/json'):
|
||||||
return self.decode(response.json())
|
output = self.api_func.decode_client(response.json())
|
||||||
elif response.headers.get('Content-Type').startswith('text/plain;charset=UTF-8'):
|
elif response.headers.get('Content-Type').startswith('text/plain;charset=UTF-8'):
|
||||||
return { 'data': response.content.decode('utf-8') }
|
output = { 'data': response.content.decode('utf-8') }
|
||||||
raise HttpHeaderError(f'unimplemented response Content-Type: {response.headers=!r}')
|
else:
|
||||||
|
raise HttpHeaderError(f'unimplemented response Content-Type: {response.headers=!r}')
|
||||||
|
|
||||||
|
# In case the response contains a header, check it to make sure that the API call was executed successfully
|
||||||
|
# (the presence of the header field is checked by the decode_client method)
|
||||||
|
if 'header' in output:
|
||||||
|
if output['header']['functionExecutionStatus']['status'] not in ['Executed-Success','Executed-WithWarning']:
|
||||||
|
raise ApiError(output['header']['functionExecutionStatus'])
|
||||||
|
return output
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
class JsonHttpApiServer():
|
||||||
|
def __init__(self, api_func: JsonHttpApiFunction, call_handler = None):
|
||||||
|
"""
|
||||||
|
Args:
|
||||||
|
api_func : API function definition (JsonHttpApiFunction)
|
||||||
|
call_handler : handler function to process the request. This function must accept the
|
||||||
|
decoded request as a dictionary. The handler function must return a tuple consisting
|
||||||
|
of the response in the form of a dictionary (may be empty), and a function execution
|
||||||
|
status string ('Executed-Success', 'Executed-WithWarning', 'Failed' or 'Expired')
|
||||||
|
"""
|
||||||
|
self.api_func = api_func
|
||||||
|
if call_handler:
|
||||||
|
self.call_handler = call_handler
|
||||||
|
else:
|
||||||
|
self.call_handler = self.default_handler
|
||||||
|
|
||||||
|
def default_handler(self, data: dict) -> (dict, str):
|
||||||
|
"""default handler, used in case no call handler is provided."""
|
||||||
|
logger.error("no handler function for request: %s" % str(data))
|
||||||
|
return {}, 'Failed'
|
||||||
|
|
||||||
|
def call(self, request: Request) -> str:
|
||||||
|
""" Process an incoming request.
|
||||||
|
Args:
|
||||||
|
request : request object as received using twisted.web.server
|
||||||
|
Returns:
|
||||||
|
encoded JSON string (HTTP response code and headers are set by calling the appropriate methods on the
|
||||||
|
provided the request object)
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Make sure the request is done with the correct HTTP method
|
||||||
|
if (request.method.decode() != self.api_func.http_method):
|
||||||
|
raise ValueError('Wrong HTTP method %s!=%s' % (request.method.decode(), self.api_func.http_method))
|
||||||
|
|
||||||
|
# Decode the request
|
||||||
|
decoded_request = self.api_func.decode_server(json.loads(request.content.read()))
|
||||||
|
|
||||||
|
# Run call handler (see above)
|
||||||
|
data, fe_status = self.call_handler(decoded_request)
|
||||||
|
|
||||||
|
# In case a function execution status is returned, use it to generate and prepend the header field according to
|
||||||
|
# SGP.22, section 6.5.1.2 and 6.5.1.4 (the presence of the header filed is checked by the encode_server method)
|
||||||
|
if fe_status:
|
||||||
|
data = {'header' : {'functionExecutionStatus': {'status' : fe_status}}} | data
|
||||||
|
|
||||||
|
# Encode the message (the presence of mandatory fields is checked during encoding)
|
||||||
|
encoded = json.dumps(self.api_func.encode_server(data))
|
||||||
|
|
||||||
|
# Apply HTTP request headers according to SGP.22, section 6.5.1
|
||||||
|
res_headers = {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'X-Admin-Protocol': 'gsma/rsp/v2.5.0',
|
||||||
|
}
|
||||||
|
res_headers.update(self.api_func.extra_http_res_headers)
|
||||||
|
for header, value in res_headers.items():
|
||||||
|
request.setHeader(header, value)
|
||||||
|
request.setResponseCode(self.api_func.expected_http_status)
|
||||||
|
|
||||||
|
# Return the encoded result back to the caller for sending (using twisted/klein)
|
||||||
|
return encoded
|
||||||
|
|
||||||
|
|||||||
@@ -151,8 +151,6 @@ class File:
|
|||||||
self.df_name = None
|
self.df_name = None
|
||||||
self.fill_pattern = None
|
self.fill_pattern = None
|
||||||
self.fill_pattern_repeat = False
|
self.fill_pattern_repeat = False
|
||||||
self.pstdo = None # pinStatusTemplateDO, mandatory for DF/ADF
|
|
||||||
self.lcsi = None # optional life cycle status indicator
|
|
||||||
# apply some defaults from profile
|
# apply some defaults from profile
|
||||||
if self.template:
|
if self.template:
|
||||||
self.from_template(self.template)
|
self.from_template(self.template)
|
||||||
@@ -280,8 +278,6 @@ class File:
|
|||||||
elif self.file_type in ['MF', 'DF', 'ADF']:
|
elif self.file_type in ['MF', 'DF', 'ADF']:
|
||||||
fdb_dec['file_type'] = 'df'
|
fdb_dec['file_type'] = 'df'
|
||||||
fdb_dec['structure'] = 'no_info_given'
|
fdb_dec['structure'] = 'no_info_given'
|
||||||
# pinStatusTemplateDO is mandatory for DF/ADF
|
|
||||||
fileDescriptor['pinStatusTemplateDO'] = self.pstdo
|
|
||||||
# build file descriptor based on above input data
|
# build file descriptor based on above input data
|
||||||
fd_dict = {}
|
fd_dict = {}
|
||||||
if len(fdb_dec):
|
if len(fdb_dec):
|
||||||
@@ -308,8 +304,6 @@ class File:
|
|||||||
# desired fill or repeat pattern in the "proprietaryEFInfo" element for the EF in Profiles
|
# desired fill or repeat pattern in the "proprietaryEFInfo" element for the EF in Profiles
|
||||||
# downloaded to a V2.2 or earlier eUICC.
|
# downloaded to a V2.2 or earlier eUICC.
|
||||||
fileDescriptor['proprietaryEFInfo'] = pefi
|
fileDescriptor['proprietaryEFInfo'] = pefi
|
||||||
if self.lcsi:
|
|
||||||
fileDescriptor['lcsi'] = self.lcsi
|
|
||||||
logger.debug("%s: to_fileDescriptor(%s)" % (self, fileDescriptor))
|
logger.debug("%s: to_fileDescriptor(%s)" % (self, fileDescriptor))
|
||||||
return fileDescriptor
|
return fileDescriptor
|
||||||
|
|
||||||
@@ -329,8 +323,6 @@ class File:
|
|||||||
if efFileSize:
|
if efFileSize:
|
||||||
self._file_size = self._decode_file_size(efFileSize)
|
self._file_size = self._decode_file_size(efFileSize)
|
||||||
|
|
||||||
self.pstdo = fileDescriptor.get('pinStatusTemplateDO', None)
|
|
||||||
self.lcsi = fileDescriptor.get('lcsi', None)
|
|
||||||
pefi = fileDescriptor.get('proprietaryEFInfo', {})
|
pefi = fileDescriptor.get('proprietaryEFInfo', {})
|
||||||
securityAttributesReferenced = fileDescriptor.get('securityAttributesReferenced', None)
|
securityAttributesReferenced = fileDescriptor.get('securityAttributesReferenced', None)
|
||||||
if securityAttributesReferenced:
|
if securityAttributesReferenced:
|
||||||
@@ -441,7 +433,7 @@ class File:
|
|||||||
elif k == 'fillFileContent':
|
elif k == 'fillFileContent':
|
||||||
stream.write(v)
|
stream.write(v)
|
||||||
else:
|
else:
|
||||||
raise ValueError("Unknown key '%s' in tuple list" % k)
|
return ValueError("Unknown key '%s' in tuple list" % k)
|
||||||
return stream.getvalue()
|
return stream.getvalue()
|
||||||
|
|
||||||
def file_content_to_tuples(self, optimize:bool = False) -> List[Tuple]:
|
def file_content_to_tuples(self, optimize:bool = False) -> List[Tuple]:
|
||||||
|
|||||||
@@ -57,18 +57,15 @@ class BatchPersonalization:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
class ParamAndSrc:
|
class ParamAndSrc:
|
||||||
"""tie a ConfigurableParameter to a source of actual values"""
|
'tie a ConfigurableParameter to a source of actual values'
|
||||||
def __init__(self, param: ConfigurableParameter, src: param_source.ParamSource):
|
def __init__(self, param: ConfigurableParameter, src: param_source.ParamSource):
|
||||||
if isinstance(param, type):
|
self.param = param
|
||||||
self.param_cls = param
|
|
||||||
else:
|
|
||||||
self.param_cls = param.__class__
|
|
||||||
self.src = src
|
self.src = src
|
||||||
|
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
n: int,
|
n: int,
|
||||||
src_pes: ProfileElementSequence,
|
src_pes: ProfileElementSequence,
|
||||||
params: list[ParamAndSrc]=[],
|
params: list[ParamAndSrc]=None,
|
||||||
csv_rows: Generator=None,
|
csv_rows: Generator=None,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
@@ -77,10 +74,10 @@ class BatchPersonalization:
|
|||||||
copied.
|
copied.
|
||||||
params: list of ParamAndSrc instances, defining a ConfigurableParameter and corresponding ParamSource to fill in
|
params: list of ParamAndSrc instances, defining a ConfigurableParameter and corresponding ParamSource to fill in
|
||||||
profile values.
|
profile values.
|
||||||
csv_rows: A generator (e.g. iter(list_of_rows)) producing all CSV rows one at a time, starting with a row
|
csv_rows: A list or generator producing all CSV rows one at a time, starting with a row containing the column
|
||||||
containing the column headers. This is compatible with the python csv.reader. Each row gets passed to
|
headers. This is compatible with the python csv.reader. Each row gets passed to
|
||||||
ParamSource.get_next(), such that ParamSource implementations can access the row items. See
|
ParamSource.get_next(), such that ParamSource implementations can access the row items.
|
||||||
param_source.CsvSource.
|
See param_source.CsvSource.
|
||||||
"""
|
"""
|
||||||
self.n = n
|
self.n = n
|
||||||
self.params = params or []
|
self.params = params or []
|
||||||
@@ -88,7 +85,7 @@ class BatchPersonalization:
|
|||||||
self.csv_rows = csv_rows
|
self.csv_rows = csv_rows
|
||||||
|
|
||||||
def add_param_and_src(self, param:ConfigurableParameter, src:param_source.ParamSource):
|
def add_param_and_src(self, param:ConfigurableParameter, src:param_source.ParamSource):
|
||||||
self.params.append(BatchPersonalization.ParamAndSrc(param, src))
|
self.params.append(BatchPersonalization.ParamAndSrc(param=param, src=src))
|
||||||
|
|
||||||
def generate_profiles(self):
|
def generate_profiles(self):
|
||||||
# get first row of CSV: column names
|
# get first row of CSV: column names
|
||||||
@@ -115,10 +112,10 @@ class BatchPersonalization:
|
|||||||
try:
|
try:
|
||||||
input_value = p.src.get_next(csv_row=csv_row)
|
input_value = p.src.get_next(csv_row=csv_row)
|
||||||
assert input_value is not None
|
assert input_value is not None
|
||||||
value = p.param_cls.validate_val(input_value)
|
value = p.param.__class__.validate_val(input_value)
|
||||||
p.param_cls.apply_val(pes, value)
|
p.param.__class__.apply_val(pes, value)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise ValueError(f'{p.param_cls.get_name()} fed by {p.src.name}: {e}') from e
|
raise ValueError(f'{p.param.name} fed by {p.src.name}: {e}') from e
|
||||||
|
|
||||||
yield pes
|
yield pes
|
||||||
|
|
||||||
@@ -132,7 +129,7 @@ class UppAudit(dict):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_der(cls, der: bytes, params: List, der_size=False, additional_sd_keys=False):
|
def from_der(cls, der: bytes, params: List, der_size=False, additional_sd_keys=False):
|
||||||
"""return a dict of parameter name and set of selected parameter values found in a DER encoded profile. Note:
|
'''return a dict of parameter name and set of selected parameter values found in a DER encoded profile. Note:
|
||||||
some ConfigurableParameter implementations return more than one key-value pair, for example, Imsi returns
|
some ConfigurableParameter implementations return more than one key-value pair, for example, Imsi returns
|
||||||
both 'IMSI' and 'IMSI-ACC' parameters.
|
both 'IMSI' and 'IMSI-ACC' parameters.
|
||||||
|
|
||||||
@@ -154,7 +151,7 @@ class UppAudit(dict):
|
|||||||
Scp80Kvn03. So we would not show kvn 0x04..0x0f in an audit. additional_sd_keys=True includes audits of all SD
|
Scp80Kvn03. So we would not show kvn 0x04..0x0f in an audit. additional_sd_keys=True includes audits of all SD
|
||||||
key KVN there may be in the UPP. This helps to spot SD keys that may already be present in a UPP template, with
|
key KVN there may be in the UPP. This helps to spot SD keys that may already be present in a UPP template, with
|
||||||
unexpected / unusual kvn.
|
unexpected / unusual kvn.
|
||||||
"""
|
'''
|
||||||
|
|
||||||
# make an instance of this class
|
# make an instance of this class
|
||||||
upp_audit = cls()
|
upp_audit = cls()
|
||||||
@@ -320,7 +317,7 @@ class BatchAudit(list):
|
|||||||
return batch_audit
|
return batch_audit
|
||||||
|
|
||||||
def to_csv_rows(self, headers=True, sort_key=None):
|
def to_csv_rows(self, headers=True, sort_key=None):
|
||||||
"""generator that yields all audits' values as rows, useful feed to a csv.writer."""
|
'''generator that yields all audits' values as rows, useful feed to a csv.writer.'''
|
||||||
columns = set()
|
columns = set()
|
||||||
for audit in self:
|
for audit in self:
|
||||||
columns.update(audit.keys())
|
columns.update(audit.keys())
|
||||||
|
|||||||
@@ -37,10 +37,13 @@ class ParamSource:
|
|||||||
name = "none"
|
name = "none"
|
||||||
numeric_base = None # or 10 or 16
|
numeric_base = None # or 10 or 16
|
||||||
|
|
||||||
def __init__(self, input_str:str):
|
@classmethod
|
||||||
"""Subclasses should call super().__init__(input_str) before evaluating self.input_str. Each subclass __init__()
|
def from_str(cls, s:str):
|
||||||
may in turn manipulate self.input_str to apply expansions or decodings."""
|
"""Subclasses implement this:
|
||||||
self.input_str = input_str
|
if a parameter source defines some string input magic, override this function.
|
||||||
|
For example, a RandomDigitSource derives the number of digits from the string length,
|
||||||
|
so the user can enter '0000' to get a four digit random number."""
|
||||||
|
return cls(s)
|
||||||
|
|
||||||
def get_next(self, csv_row:dict=None):
|
def get_next(self, csv_row:dict=None):
|
||||||
"""Subclasses implement this: return the next value from the parameter source.
|
"""Subclasses implement this: return the next value from the parameter source.
|
||||||
@@ -48,81 +51,78 @@ class ParamSource:
|
|||||||
This default implementation is an empty source."""
|
This default implementation is an empty source."""
|
||||||
raise ParamSourceExhaustedExn()
|
raise ParamSourceExhaustedExn()
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_str(cls, input_str:str):
|
|
||||||
"""compatibility with earlier version of ParamSource. Just use the constructor."""
|
|
||||||
return cls(input_str)
|
|
||||||
|
|
||||||
class ConstantSource(ParamSource):
|
class ConstantSource(ParamSource):
|
||||||
"""one value for all"""
|
"""one value for all"""
|
||||||
name = "constant"
|
name = "constant"
|
||||||
|
|
||||||
|
def __init__(self, val:str):
|
||||||
|
self.val = val
|
||||||
|
|
||||||
def get_next(self, csv_row:dict=None):
|
def get_next(self, csv_row:dict=None):
|
||||||
return self.input_str
|
return self.val
|
||||||
|
|
||||||
class InputExpandingParamSource(ParamSource):
|
class InputExpandingParamSource(ParamSource):
|
||||||
|
|
||||||
def __init__(self, input_str:str):
|
|
||||||
super().__init__(input_str)
|
|
||||||
self.input_str = self.expand_input_str(self.input_str)
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def expand_input_str(cls, input_str:str):
|
def expand_str(cls, s:str):
|
||||||
# user convenience syntax '0*32' becomes '00000000000000000000000000000000'
|
# user convenience syntax '0*32' becomes '00000000000000000000000000000000'
|
||||||
if "*" not in input_str:
|
if "*" not in s:
|
||||||
return input_str
|
return s
|
||||||
# re: "XX * 123" with optional spaces
|
tokens = re.split(r"([^ \t]+)[ \t]*\*[ \t]*([0-9]+)", s)
|
||||||
tokens = re.split(r"([^ \t]+)[ \t]*\*[ \t]*([0-9]+)", input_str)
|
|
||||||
if len(tokens) < 3:
|
if len(tokens) < 3:
|
||||||
return input_str
|
return s
|
||||||
parts = []
|
parts = []
|
||||||
for unchanged, snippet, repeat_str in zip(tokens[0::3], tokens[1::3], tokens[2::3]):
|
for unchanged, snippet, repeat_str in zip(tokens[0::3], tokens[1::3], tokens[2::3]):
|
||||||
parts.append(unchanged)
|
parts.append(unchanged)
|
||||||
repeat = int(repeat_str)
|
repeat = int(repeat_str)
|
||||||
parts.append(snippet * repeat)
|
parts.append(snippet * repeat)
|
||||||
|
|
||||||
return "".join(parts)
|
return "".join(parts)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_str(cls, s:str):
|
||||||
|
return cls(cls.expand_str(s))
|
||||||
|
|
||||||
class DecimalRangeSource(InputExpandingParamSource):
|
class DecimalRangeSource(InputExpandingParamSource):
|
||||||
"""abstract: decimal numbers with a value range"""
|
"""abstract: decimal numbers with a value range"""
|
||||||
|
|
||||||
numeric_base = 10
|
numeric_base = 10
|
||||||
|
|
||||||
def __init__(self, input_str:str=None, num_digits:int=None, first_value:int=None, last_value:int=None):
|
def __init__(self, num_digits, first_value, last_value):
|
||||||
"""Constructor to set up values from a (user entered) string: DecimalRangeSource(input_str).
|
|
||||||
Constructor to set up values directly: DecimalRangeSource(num_digits=3, first_value=123, last_value=456)
|
|
||||||
|
|
||||||
num_digits produces leading zeros when first_value..last_value are shorter.
|
|
||||||
"""
|
"""
|
||||||
assert ((input_str is not None and (num_digits, first_value, last_value) == (None, None, None))
|
See also from_str().
|
||||||
or (input_str is None and None not in (num_digits, first_value, last_value)))
|
|
||||||
|
|
||||||
if input_str is not None:
|
|
||||||
super().__init__(input_str)
|
|
||||||
|
|
||||||
input_str = self.input_str
|
|
||||||
|
|
||||||
if ".." in input_str:
|
|
||||||
first_str, last_str = input_str.split('..')
|
|
||||||
first_str = first_str.strip()
|
|
||||||
last_str = last_str.strip()
|
|
||||||
else:
|
|
||||||
first_str = input_str.strip()
|
|
||||||
last_str = None
|
|
||||||
|
|
||||||
num_digits = len(first_str)
|
|
||||||
first_value = int(first_str)
|
|
||||||
last_value = int(last_str if last_str is not None else "9" * num_digits)
|
|
||||||
|
|
||||||
|
All arguments are integer values, and are converted to int if necessary, so a string of an integer is fine.
|
||||||
|
num_digits: fixed number of digits (possibly with leading zeros) to generate.
|
||||||
|
first_value, last_value: the decimal range in which to provide digits.
|
||||||
|
"""
|
||||||
|
num_digits = int(num_digits)
|
||||||
|
first_value = int(first_value)
|
||||||
|
last_value = int(last_value)
|
||||||
assert num_digits > 0
|
assert num_digits > 0
|
||||||
assert first_value <= last_value
|
assert first_value <= last_value
|
||||||
self.num_digits = num_digits
|
self.num_digits = num_digits
|
||||||
self.first_value = first_value
|
self.val_first_last = (first_value, last_value)
|
||||||
self.last_value = last_value
|
|
||||||
|
|
||||||
def val_to_digit(self, val:int):
|
def val_to_digit(self, val:int):
|
||||||
return "%0*d" % (self.num_digits, val) # pylint: disable=consider-using-f-string
|
return "%0*d" % (self.num_digits, val) # pylint: disable=consider-using-f-string
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_str(cls, s:str):
|
||||||
|
s = cls.expand_str(s)
|
||||||
|
|
||||||
|
if ".." in s:
|
||||||
|
first_str, last_str = s.split('..')
|
||||||
|
first_str = first_str.strip()
|
||||||
|
last_str = last_str.strip()
|
||||||
|
else:
|
||||||
|
first_str = s.strip()
|
||||||
|
last_str = None
|
||||||
|
|
||||||
|
first_value = int(first_str)
|
||||||
|
last_value = int(last_str) if last_str is not None else "9" * len(first_str)
|
||||||
|
return cls(num_digits=len(first_str), first_value=first_value, last_value=last_value)
|
||||||
|
|
||||||
class RandomSourceMixin:
|
class RandomSourceMixin:
|
||||||
random_impl = secrets.SystemRandom()
|
random_impl = secrets.SystemRandom()
|
||||||
|
|
||||||
@@ -135,7 +135,7 @@ class RandomDigitSource(DecimalRangeSource, RandomSourceMixin):
|
|||||||
# try to generate random digits that are always different from previously produced random bytes
|
# try to generate random digits that are always different from previously produced random bytes
|
||||||
attempts = 10
|
attempts = 10
|
||||||
while True:
|
while True:
|
||||||
val = self.random_impl.randint(self.first_value, self.last_value)
|
val = self.random_impl.randint(*self.val_first_last)
|
||||||
if val in RandomDigitSource.used_keys:
|
if val in RandomDigitSource.used_keys:
|
||||||
attempts -= 1
|
attempts -= 1
|
||||||
if attempts:
|
if attempts:
|
||||||
@@ -150,11 +150,9 @@ class RandomHexDigitSource(InputExpandingParamSource, RandomSourceMixin):
|
|||||||
numeric_base = 16
|
numeric_base = 16
|
||||||
used_keys = set()
|
used_keys = set()
|
||||||
|
|
||||||
def __init__(self, input_str:str):
|
def __init__(self, num_digits):
|
||||||
super().__init__(input_str)
|
"""see from_str()"""
|
||||||
input_str = self.input_str
|
num_digits = int(num_digits)
|
||||||
|
|
||||||
num_digits = len(input_str.strip())
|
|
||||||
if num_digits < 1:
|
if num_digits < 1:
|
||||||
raise ValueError("zero number of digits")
|
raise ValueError("zero number of digits")
|
||||||
# hex digits always come in two
|
# hex digits always come in two
|
||||||
@@ -176,20 +174,23 @@ class RandomHexDigitSource(InputExpandingParamSource, RandomSourceMixin):
|
|||||||
|
|
||||||
return b2h(val)
|
return b2h(val)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_str(cls, s:str):
|
||||||
|
s = cls.expand_str(s)
|
||||||
|
return cls(num_digits=len(s.strip()))
|
||||||
|
|
||||||
class IncDigitSource(DecimalRangeSource):
|
class IncDigitSource(DecimalRangeSource):
|
||||||
"""incrementing sequence of digits"""
|
"""incrementing sequence of digits"""
|
||||||
name = "incrementing decimal digits"
|
name = "incrementing decimal digits"
|
||||||
|
|
||||||
def __init__(self, input_str:str=None, num_digits:int=None, first_value:int=None, last_value:int=None):
|
def __init__(self, num_digits, first_value, last_value):
|
||||||
"""input_str: the first value to return, a string of an integer number with optional leading zero digits. The
|
super().__init__(num_digits, first_value, last_value)
|
||||||
leading zero digits are preserved."""
|
|
||||||
super().__init__(input_str, num_digits, first_value, last_value)
|
|
||||||
self.next_val = None
|
self.next_val = None
|
||||||
self.reset()
|
self.reset()
|
||||||
|
|
||||||
def reset(self):
|
def reset(self):
|
||||||
"""Restart from the first value of the defined range passed to __init__()."""
|
"""Restart from the first value of the defined range passed to __init__()."""
|
||||||
self.next_val = self.first_value
|
self.next_val = self.val_first_last[0]
|
||||||
|
|
||||||
def get_next(self, csv_row:dict=None):
|
def get_next(self, csv_row:dict=None):
|
||||||
val = self.next_val
|
val = self.next_val
|
||||||
@@ -199,7 +200,7 @@ class IncDigitSource(DecimalRangeSource):
|
|||||||
returnval = self.val_to_digit(val)
|
returnval = self.val_to_digit(val)
|
||||||
|
|
||||||
val += 1
|
val += 1
|
||||||
if val > self.last_value:
|
if val > self.val_first_last[1]:
|
||||||
self.next_val = None
|
self.next_val = None
|
||||||
else:
|
else:
|
||||||
self.next_val = val
|
self.next_val = val
|
||||||
@@ -210,15 +211,13 @@ class CsvSource(ParamSource):
|
|||||||
"""apply a column from a CSV row, as passed in to ParamSource.get_next(csv_row)"""
|
"""apply a column from a CSV row, as passed in to ParamSource.get_next(csv_row)"""
|
||||||
name = "from CSV"
|
name = "from CSV"
|
||||||
|
|
||||||
def __init__(self, input_str:str):
|
def __init__(self, csv_column):
|
||||||
"""self.csv_column = input_str:
|
|
||||||
column name indicating the column to use for this parameter.
|
|
||||||
This name is used in get_next(): the caller passes the current CSV row to get_next(), from which
|
|
||||||
CsvSource picks the column with the name matching csv_column.
|
|
||||||
"""
|
"""
|
||||||
"""Parse input_str into self.num_digits, self.first_value, self.last_value."""
|
csv_column: column name indicating the column to use for this parameter.
|
||||||
super().__init__(input_str)
|
This name is used in get_next(): the caller passes the current CSV row to get_next(), from which
|
||||||
self.csv_column = self.input_str
|
CsvSource picks the column with the name matching csv_column.
|
||||||
|
"""
|
||||||
|
self.csv_column = csv_column
|
||||||
|
|
||||||
def get_next(self, csv_row:dict=None):
|
def get_next(self, csv_row:dict=None):
|
||||||
val = None
|
val = None
|
||||||
|
|||||||
@@ -22,11 +22,9 @@ import re
|
|||||||
import pprint
|
import pprint
|
||||||
from typing import List, Tuple, Generator, Optional
|
from typing import List, Tuple, Generator, Optional
|
||||||
|
|
||||||
from construct.core import StreamError
|
|
||||||
from osmocom.tlv import camel_to_snake
|
from osmocom.tlv import camel_to_snake
|
||||||
from osmocom.utils import hexstr
|
from osmocom.utils import hexstr
|
||||||
from pySim.utils import enc_iccid, dec_iccid, enc_imsi, dec_imsi, h2b, b2h, rpad, sanitize_iccid
|
from pySim.utils import enc_iccid, dec_iccid, enc_imsi, dec_imsi, h2b, b2h, rpad, sanitize_iccid
|
||||||
from pySim.ts_31_102 import EF_AD
|
|
||||||
from pySim.ts_51_011 import EF_SMSP
|
from pySim.ts_51_011 import EF_SMSP
|
||||||
from pySim.esim.saip import param_source
|
from pySim.esim.saip import param_source
|
||||||
from pySim.esim.saip import ProfileElement, ProfileElementSD, ProfileElementSequence
|
from pySim.esim.saip import ProfileElement, ProfileElementSD, ProfileElementSequence
|
||||||
@@ -57,6 +55,22 @@ class ClassVarMeta(abc.ABCMeta):
|
|||||||
setattr(x, k, v)
|
setattr(x, k, v)
|
||||||
return x
|
return x
|
||||||
|
|
||||||
|
def file_tuples_content_as_bytes(l: List[Tuple]) -> Optional[bytes]:
|
||||||
|
"""linearize a list of fillFileContent / fillFileOffset tuples into a stream of bytes."""
|
||||||
|
stream = io.BytesIO()
|
||||||
|
for k, v in l:
|
||||||
|
if k == 'doNotCreate':
|
||||||
|
return None
|
||||||
|
if k == 'fileDescriptor':
|
||||||
|
pass
|
||||||
|
elif k == 'fillFileOffset':
|
||||||
|
stream.seek(v, os.SEEK_CUR)
|
||||||
|
elif k == 'fillFileContent':
|
||||||
|
stream.write(v)
|
||||||
|
else:
|
||||||
|
return ValueError("Unknown key '%s' in tuple list" % k)
|
||||||
|
return stream.getvalue()
|
||||||
|
|
||||||
class ConfigurableParameter(abc.ABC, metaclass=ClassVarMeta):
|
class ConfigurableParameter(abc.ABC, metaclass=ClassVarMeta):
|
||||||
r"""Base class representing a part of the eSIM profile that is configurable during the
|
r"""Base class representing a part of the eSIM profile that is configurable during the
|
||||||
personalization process (with dynamic data from elsewhere).
|
personalization process (with dynamic data from elsewhere).
|
||||||
@@ -330,7 +344,6 @@ class DecimalHexParam(DecimalParam):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def validate_val(cls, val):
|
def validate_val(cls, val):
|
||||||
val = super().validate_val(val)
|
val = super().validate_val(val)
|
||||||
assert isinstance(val, str)
|
|
||||||
val = ''.join('%02x' % ord(x) for x in val)
|
val = ''.join('%02x' % ord(x) for x in val)
|
||||||
if cls.rpad is not None:
|
if cls.rpad is not None:
|
||||||
c = cls.rpad_char
|
c = cls.rpad_char
|
||||||
@@ -340,7 +353,7 @@ class DecimalHexParam(DecimalParam):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def decimal_hex_to_str(cls, val):
|
def decimal_hex_to_str(cls, val):
|
||||||
"""useful for get_values_from_pes() implementations of subclasses"""
|
'useful for get_values_from_pes() implementations of subclasses'
|
||||||
if isinstance(val, bytes):
|
if isinstance(val, bytes):
|
||||||
val = b2h(val)
|
val = b2h(val)
|
||||||
assert isinstance(val, hexstr)
|
assert isinstance(val, hexstr)
|
||||||
@@ -620,7 +633,7 @@ class SmspTpScAddr(ConfigurableParameter):
|
|||||||
# ensure the parameter_indicators.tp_sc_addr is True
|
# ensure the parameter_indicators.tp_sc_addr is True
|
||||||
ef_smsp_dec['parameter_indicators']['tp_sc_addr'] = True
|
ef_smsp_dec['parameter_indicators']['tp_sc_addr'] = True
|
||||||
# re-encode into the File body
|
# re-encode into the File body
|
||||||
f_smsp.body = ef_smsp.encode_record_bin(ef_smsp_dec, 1, 52)
|
f_smsp.body = ef_smsp.encode_record_bin(ef_smsp_dec, 1)
|
||||||
#print("SMSP (new): %s" % f_smsp.body)
|
#print("SMSP (new): %s" % f_smsp.body)
|
||||||
# re-generate the pe.decoded member from the File instance
|
# re-generate the pe.decoded member from the File instance
|
||||||
pe.file2pe(f_smsp)
|
pe.file2pe(f_smsp)
|
||||||
@@ -649,71 +662,6 @@ class SmspTpScAddr(ConfigurableParameter):
|
|||||||
yield { cls.name: cls.tuple_to_str((international, digits)) }
|
yield { cls.name: cls.tuple_to_str((international, digits)) }
|
||||||
|
|
||||||
|
|
||||||
class MncLen(ConfigurableParameter):
|
|
||||||
"""MNC length. Must be either 2 or 3. Sets only the MNC length field in EF-AD (Administrative Data)."""
|
|
||||||
name = 'MNC-LEN'
|
|
||||||
allow_chars = '23'
|
|
||||||
strip_chars = ' \t\r\n'
|
|
||||||
numeric_base = 10
|
|
||||||
max_len = 1
|
|
||||||
min_len = 1
|
|
||||||
example_input = '2'
|
|
||||||
default_source = param_source.ConstantSource
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def validate_val(cls, val):
|
|
||||||
val = super().validate_val(val)
|
|
||||||
val = int(val)
|
|
||||||
if val not in (2, 3):
|
|
||||||
raise ValueError(f"MNC-LEN must be either 2 or 3, not {val!r}")
|
|
||||||
return val
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def apply_val(cls, pes: ProfileElementSequence, val):
|
|
||||||
"""val must be an int: either 2 or 3"""
|
|
||||||
for pe in pes.get_pes_for_type('usim'):
|
|
||||||
if not hasattr(pe, 'files'):
|
|
||||||
continue
|
|
||||||
# decode existing values
|
|
||||||
f_ad = pe.files['ef-ad']
|
|
||||||
if not f_ad.body:
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
ef_ad = EF_AD()
|
|
||||||
ef_ad_dec = ef_ad.decode_bin(f_ad.body)
|
|
||||||
except StreamError:
|
|
||||||
continue
|
|
||||||
if 'mnc_len' not in ef_ad_dec:
|
|
||||||
continue
|
|
||||||
# change mnc_len
|
|
||||||
ef_ad_dec['mnc_len'] = val
|
|
||||||
# re-encode into the File body
|
|
||||||
f_ad.body = ef_ad.encode_bin(ef_ad_dec)
|
|
||||||
pe.file2pe(f_ad)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_values_from_pes(cls, pes: ProfileElementSequence):
|
|
||||||
for naa in ('isim',):# 'isim', 'csim'):
|
|
||||||
for pe in pes.get_pes_for_type(naa):
|
|
||||||
if not hasattr(pe, 'files'):
|
|
||||||
continue
|
|
||||||
f_ad = pe.files.get('ef-ad', None)
|
|
||||||
if f_ad is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
ef_ad = EF_AD()
|
|
||||||
ef_ad_dec = ef_ad.decode_bin(f_ad.body)
|
|
||||||
except StreamError:
|
|
||||||
continue
|
|
||||||
|
|
||||||
mnc_len = ef_ad_dec.get('mnc_len', None)
|
|
||||||
if mnc_len is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
yield { cls.name: str(mnc_len) }
|
|
||||||
|
|
||||||
|
|
||||||
class SdKey(BinaryParam):
|
class SdKey(BinaryParam):
|
||||||
"""Configurable Security Domain (SD) Key. Value is presented as bytes.
|
"""Configurable Security Domain (SD) Key. Value is presented as bytes.
|
||||||
Non-abstract implementations are generated in SdKey.generate_sd_key_classes"""
|
Non-abstract implementations are generated in SdKey.generate_sd_key_classes"""
|
||||||
|
|||||||
@@ -30,7 +30,6 @@ import tempfile
|
|||||||
import json
|
import json
|
||||||
import abc
|
import abc
|
||||||
import inspect
|
import inspect
|
||||||
import os
|
|
||||||
|
|
||||||
import cmd2
|
import cmd2
|
||||||
from cmd2 import CommandSet, with_default_category
|
from cmd2 import CommandSet, with_default_category
|
||||||
@@ -553,85 +552,6 @@ class CardADF(CardDF):
|
|||||||
return lchan.selected_file.application.export(as_json, lchan)
|
return lchan.selected_file.application.export(as_json, lchan)
|
||||||
|
|
||||||
|
|
||||||
class JsonEditor:
|
|
||||||
"""Context manager for editing a JSON-encoded EF value in an external editor.
|
|
||||||
|
|
||||||
Writes the current JSON value (plus encode/decode examples as //-comments)
|
|
||||||
to a temporary file, opens the user's editor, then reads the result back
|
|
||||||
(stripping comment lines) and returns it as the context variable::
|
|
||||||
|
|
||||||
with JsonEditor(self._cmd, orig_json, ef) as edited_json:
|
|
||||||
if edited_json != orig_json:
|
|
||||||
...write back...
|
|
||||||
"""
|
|
||||||
def __init__(self, cmd, orig_json, ef):
|
|
||||||
self._cmd = cmd
|
|
||||||
self._orig_json = orig_json
|
|
||||||
self._ef = ef
|
|
||||||
self._file = None
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _strip_comments(text: str) -> str:
|
|
||||||
"""Strip //-comment lines from text before JSON parsing."""
|
|
||||||
# TODO: also strip inline comments?
|
|
||||||
return '\n'.join(line for line in text.splitlines() if not line.lstrip().startswith('//'))
|
|
||||||
|
|
||||||
def _append_examples_as_comments(self, text_file) -> None:
|
|
||||||
"""Append encode/decode test vectors as //-comment lines to an open file.
|
|
||||||
The examples are taken from _test_de_encode and _test_decode class
|
|
||||||
attributes (same source as the auto-generated filesystem documentation).
|
|
||||||
The comment block is intentionally ignored on read-back by _strip_comments."""
|
|
||||||
vectors = []
|
|
||||||
for attr in ('_test_de_encode', '_test_decode'):
|
|
||||||
v = getattr(type(self._ef), attr, None)
|
|
||||||
if v:
|
|
||||||
vectors.extend(v)
|
|
||||||
if not vectors:
|
|
||||||
return
|
|
||||||
ef = self._ef
|
|
||||||
parts = [ef.fully_qualified_path_str()]
|
|
||||||
if ef.fid:
|
|
||||||
parts.append(f'({ef.fid.upper()})')
|
|
||||||
if ef.desc:
|
|
||||||
parts.append(f'- {ef.desc}')
|
|
||||||
text_file.write(f'\n\n// {" ".join(parts)}\n')
|
|
||||||
text_file.write('// Examples (ignored on save):\n')
|
|
||||||
for t in vectors:
|
|
||||||
if len(t) >= 3:
|
|
||||||
encoded, record_nr, decoded = t[0], t[1], t[2]
|
|
||||||
text_file.write(f'// record {record_nr}: {encoded}\n')
|
|
||||||
else:
|
|
||||||
encoded, decoded = t[0], t[1]
|
|
||||||
text_file.write(f'// file: {encoded}\n')
|
|
||||||
for line in json.dumps(decoded, indent=4, cls=JsonEncoder).splitlines():
|
|
||||||
text_file.write(f'// {line}\n')
|
|
||||||
|
|
||||||
def __enter__(self) -> object:
|
|
||||||
"""Write JSON + examples to a temp file, run the editor, return parsed result.
|
|
||||||
|
|
||||||
On JSONDecodeError the user is offered the option to re-open the file
|
|
||||||
and fix the mistake interactively. The temp file is removed by __exit__()
|
|
||||||
on success, or when the user declines to retry."""
|
|
||||||
self._file = tempfile.NamedTemporaryFile(prefix='pysim_', suffix='.json',
|
|
||||||
mode='w', delete=False)
|
|
||||||
json.dump(self._orig_json, self._file, indent=4, cls=JsonEncoder)
|
|
||||||
self._append_examples_as_comments(self._file)
|
|
||||||
self._file.close()
|
|
||||||
while True:
|
|
||||||
self._cmd.run_editor(self._file.name)
|
|
||||||
try:
|
|
||||||
with open(self._file.name, 'r') as f:
|
|
||||||
return json.loads(self._strip_comments(f.read()))
|
|
||||||
except json.JSONDecodeError as e:
|
|
||||||
self._cmd.perror(f'Invalid JSON: {e}')
|
|
||||||
answer = self._cmd.read_input('Re-open file for editing? [y]es/[n]o: ')
|
|
||||||
if answer not in ('y', 'yes'):
|
|
||||||
return self._orig_json
|
|
||||||
|
|
||||||
def __exit__(self, *args):
|
|
||||||
os.unlink(self._file.name)
|
|
||||||
|
|
||||||
|
|
||||||
class CardEF(CardFile):
|
class CardEF(CardFile):
|
||||||
"""EF (Entry File) in the smart card filesystem"""
|
"""EF (Entry File) in the smart card filesystem"""
|
||||||
|
|
||||||
@@ -737,8 +657,15 @@ class TransparentEF(CardEF):
|
|||||||
def do_edit_binary_decoded(self, _opts):
|
def do_edit_binary_decoded(self, _opts):
|
||||||
"""Edit the JSON representation of the EF contents in an editor."""
|
"""Edit the JSON representation of the EF contents in an editor."""
|
||||||
(orig_json, _sw) = self._cmd.lchan.read_binary_dec()
|
(orig_json, _sw) = self._cmd.lchan.read_binary_dec()
|
||||||
ef = self._cmd.lchan.selected_file
|
with tempfile.TemporaryDirectory(prefix='pysim_') as dirname:
|
||||||
with JsonEditor(self._cmd, orig_json, ef) as edited_json:
|
filename = '%s/file' % dirname
|
||||||
|
# write existing data as JSON to file
|
||||||
|
with open(filename, 'w') as text_file:
|
||||||
|
json.dump(orig_json, text_file, indent=4, cls=JsonEncoder)
|
||||||
|
# run a text editor
|
||||||
|
self._cmd.run_editor(filename)
|
||||||
|
with open(filename, 'r') as text_file:
|
||||||
|
edited_json = json.load(text_file)
|
||||||
if edited_json == orig_json:
|
if edited_json == orig_json:
|
||||||
self._cmd.poutput("Data not modified, skipping write")
|
self._cmd.poutput("Data not modified, skipping write")
|
||||||
else:
|
else:
|
||||||
@@ -1032,8 +959,15 @@ class LinFixedEF(CardEF):
|
|||||||
def do_edit_record_decoded(self, opts):
|
def do_edit_record_decoded(self, opts):
|
||||||
"""Edit the JSON representation of one record in an editor."""
|
"""Edit the JSON representation of one record in an editor."""
|
||||||
(orig_json, _sw) = self._cmd.lchan.read_record_dec(opts.RECORD_NR)
|
(orig_json, _sw) = self._cmd.lchan.read_record_dec(opts.RECORD_NR)
|
||||||
ef = self._cmd.lchan.selected_file
|
with tempfile.TemporaryDirectory(prefix='pysim_') as dirname:
|
||||||
with JsonEditor(self._cmd, orig_json, ef) as edited_json:
|
filename = '%s/file' % dirname
|
||||||
|
# write existing data as JSON to file
|
||||||
|
with open(filename, 'w') as text_file:
|
||||||
|
json.dump(orig_json, text_file, indent=4, cls=JsonEncoder)
|
||||||
|
# run a text editor
|
||||||
|
self._cmd.run_editor(filename)
|
||||||
|
with open(filename, 'r') as text_file:
|
||||||
|
edited_json = json.load(text_file)
|
||||||
if edited_json == orig_json:
|
if edited_json == orig_json:
|
||||||
self._cmd.poutput("Data not modified, skipping write")
|
self._cmd.poutput("Data not modified, skipping write")
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -276,7 +276,7 @@ class ListOfSupportedOptions(BER_TLV_IE, tag=0x81):
|
|||||||
class SupportedKeysForScp03(BER_TLV_IE, tag=0x82):
|
class SupportedKeysForScp03(BER_TLV_IE, tag=0x82):
|
||||||
_construct = FlagsEnum(Byte, aes128=0x01, aes192=0x02, aes256=0x04)
|
_construct = FlagsEnum(Byte, aes128=0x01, aes192=0x02, aes256=0x04)
|
||||||
class SupportedTlsCipherSuitesForScp81(BER_TLV_IE, tag=0x83):
|
class SupportedTlsCipherSuitesForScp81(BER_TLV_IE, tag=0x83):
|
||||||
_construct = GreedyRange(Int16ub)
|
_consuruct = GreedyRange(Int16ub)
|
||||||
class ScpInformation(BER_TLV_IE, tag=0xa0, nested=[ScpType, ListOfSupportedOptions, SupportedKeysForScp03,
|
class ScpInformation(BER_TLV_IE, tag=0xa0, nested=[ScpType, ListOfSupportedOptions, SupportedKeysForScp03,
|
||||||
SupportedTlsCipherSuitesForScp81]):
|
SupportedTlsCipherSuitesForScp81]):
|
||||||
pass
|
pass
|
||||||
@@ -319,7 +319,7 @@ class CurrentSecurityLevel(BER_TLV_IE, tag=0xd3):
|
|||||||
# GlobalPlatform v2.3.1 Section 11.3.3.1.3
|
# GlobalPlatform v2.3.1 Section 11.3.3.1.3
|
||||||
class ApplicationAID(BER_TLV_IE, tag=0x4f):
|
class ApplicationAID(BER_TLV_IE, tag=0x4f):
|
||||||
_construct = GreedyBytes
|
_construct = GreedyBytes
|
||||||
class ApplicationTemplate(BER_TLV_IE, tag=0x61, nested=[ApplicationAID]):
|
class ApplicationTemplate(BER_TLV_IE, tag=0x61, ntested=[ApplicationAID]):
|
||||||
pass
|
pass
|
||||||
class ListOfApplications(BER_TLV_IE, tag=0x2f00, nested=[ApplicationTemplate]):
|
class ListOfApplications(BER_TLV_IE, tag=0x2f00, nested=[ApplicationTemplate]):
|
||||||
pass
|
pass
|
||||||
@@ -562,14 +562,14 @@ class ADF_SD(CardADF):
|
|||||||
|
|
||||||
@cmd2.with_argparser(store_data_parser)
|
@cmd2.with_argparser(store_data_parser)
|
||||||
def do_store_data(self, opts):
|
def do_store_data(self, opts):
|
||||||
"""Perform the GlobalPlatform STORE DATA command in order to store some card-specific data.
|
"""Perform the GlobalPlatform GET DATA command in order to store some card-specific data.
|
||||||
See GlobalPlatform CardSpecification v2.3 Section 11.11 for details."""
|
See GlobalPlatform CardSpecification v2.3Section 11.11 for details."""
|
||||||
response_permitted = opts.response == 'may_be_returned'
|
response_permitted = opts.response == 'may_be_returned'
|
||||||
self.store_data(h2b(opts.DATA), opts.data_structure, opts.encryption, response_permitted)
|
self.store_data(h2b(opts.DATA), opts.data_structure, opts.encryption, response_permitted)
|
||||||
|
|
||||||
def store_data(self, data: bytes, structure:str = 'none', encryption:str = 'none', response_permitted: bool = False) -> bytes:
|
def store_data(self, data: bytes, structure:str = 'none', encryption:str = 'none', response_permitted: bool = False) -> bytes:
|
||||||
"""Perform the GlobalPlatform STORE DATA command in order to store some card-specific data.
|
"""Perform the GlobalPlatform GET DATA command in order to store some card-specific data.
|
||||||
See GlobalPlatform CardSpecification v2.3 Section 11.11 for details."""
|
See GlobalPlatform CardSpecification v2.3Section 11.11 for details."""
|
||||||
max_cmd_len = self._cmd.lchan.scc.max_cmd_len
|
max_cmd_len = self._cmd.lchan.scc.max_cmd_len
|
||||||
# Table 11-89 of GP Card Specification v2.3
|
# Table 11-89 of GP Card Specification v2.3
|
||||||
remainder = data
|
remainder = data
|
||||||
@@ -585,7 +585,7 @@ class ADF_SD(CardADF):
|
|||||||
data, _sw = self._cmd.lchan.scc.send_apdu_checksw(hdr + b2h(chunk) + "00")
|
data, _sw = self._cmd.lchan.scc.send_apdu_checksw(hdr + b2h(chunk) + "00")
|
||||||
block_nr += 1
|
block_nr += 1
|
||||||
response += data
|
response += data
|
||||||
return h2b(response)
|
return data
|
||||||
|
|
||||||
put_key_parser = argparse.ArgumentParser()
|
put_key_parser = argparse.ArgumentParser()
|
||||||
put_key_parser.add_argument('--old-key-version-nr', type=auto_uint8, default=0, help='Old Key Version Number')
|
put_key_parser.add_argument('--old-key-version-nr', type=auto_uint8, default=0, help='Old Key Version Number')
|
||||||
|
|||||||
@@ -17,8 +17,6 @@
|
|||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
#
|
#
|
||||||
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from osmocom.construct import *
|
from osmocom.construct import *
|
||||||
from osmocom.utils import *
|
from osmocom.utils import *
|
||||||
from osmocom.tlv import *
|
from osmocom.tlv import *
|
||||||
@@ -48,9 +46,7 @@ class InstallParams(TLV_IE_Collection, nested=[AppSpecificParams, SystemSpecific
|
|||||||
# GPD_SPE_013, table 11-49
|
# GPD_SPE_013, table 11-49
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def gen_install_parameters(non_volatile_memory_quota: Optional[int] = None,
|
def gen_install_parameters(non_volatile_memory_quota:int, volatile_memory_quota:int, stk_parameter:str):
|
||||||
volatile_memory_quota: Optional[int] = None,
|
|
||||||
stk_parameter: Optional[str] = None):
|
|
||||||
|
|
||||||
# GPD_SPE_013, table 11-49
|
# GPD_SPE_013, table 11-49
|
||||||
|
|
||||||
@@ -58,17 +54,19 @@ def gen_install_parameters(non_volatile_memory_quota: Optional[int] = None,
|
|||||||
install_params = InstallParams()
|
install_params = InstallParams()
|
||||||
install_params_dict = [{'app_specific_params': None}]
|
install_params_dict = [{'app_specific_params': None}]
|
||||||
|
|
||||||
# Collect system specific parameters (optional)
|
#Conditional
|
||||||
system_specific_params = []
|
if non_volatile_memory_quota and volatile_memory_quota and stk_parameter:
|
||||||
if non_volatile_memory_quota is not None:
|
system_specific_params = []
|
||||||
system_specific_params.append({'non_volatile_memory_quota': non_volatile_memory_quota})
|
#Optional
|
||||||
if volatile_memory_quota is not None:
|
if non_volatile_memory_quota:
|
||||||
system_specific_params.append({'volatile_memory_quota': volatile_memory_quota})
|
system_specific_params += [{'non_volatile_memory_quota': non_volatile_memory_quota}]
|
||||||
if stk_parameter is not None:
|
#Optional
|
||||||
system_specific_params.append({'stk_parameter': stk_parameter})
|
if volatile_memory_quota:
|
||||||
# Add system specific parameters to the install parameters, if any
|
system_specific_params += [{'volatile_memory_quota': volatile_memory_quota}]
|
||||||
if system_specific_params:
|
#Optional
|
||||||
install_params_dict.append({'system_specific_params': system_specific_params})
|
if stk_parameter:
|
||||||
|
system_specific_params += [{'stk_parameter': stk_parameter}]
|
||||||
|
install_params_dict += [{'system_specific_params': system_specific_params}]
|
||||||
|
|
||||||
install_params.from_dict(install_params_dict)
|
install_params.from_dict(install_params_dict)
|
||||||
return b2h(install_params.to_bytes())
|
return b2h(install_params.to_bytes())
|
||||||
|
|||||||
@@ -266,13 +266,11 @@ class SCP02(SCP):
|
|||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def dek_encrypt(self, plaintext:bytes) -> bytes:
|
def dek_encrypt(self, plaintext:bytes) -> bytes:
|
||||||
# See also GPC section B.1.1.2, E.4.7, and E.4.1
|
cipher = DES.new(self.card_keys.dek[:8], DES.MODE_ECB)
|
||||||
cipher = DES3.new(self.sk.data_enc, DES.MODE_ECB)
|
|
||||||
return cipher.encrypt(plaintext)
|
return cipher.encrypt(plaintext)
|
||||||
|
|
||||||
def dek_decrypt(self, ciphertext:bytes) -> bytes:
|
def dek_decrypt(self, ciphertext:bytes) -> bytes:
|
||||||
# See also GPC section B.1.1.2, E.4.7, and E.4.1
|
cipher = DES.new(self.card_keys.dek[:8], DES.MODE_ECB)
|
||||||
cipher = DES3.new(self.sk.data_enc, DES.MODE_ECB)
|
|
||||||
return cipher.decrypt(ciphertext)
|
return cipher.decrypt(ciphertext)
|
||||||
|
|
||||||
def _compute_cryptograms(self, card_challenge: bytes, host_challenge: bytes):
|
def _compute_cryptograms(self, card_challenge: bytes, host_challenge: bytes):
|
||||||
@@ -438,7 +436,7 @@ class Scp03SessionKeys:
|
|||||||
"""Obtain the ICV value computed as described in 6.2.6.
|
"""Obtain the ICV value computed as described in 6.2.6.
|
||||||
This method has two modes:
|
This method has two modes:
|
||||||
* is_response=False for computing the ICV for C-ENC. Will pre-increment the counter.
|
* is_response=False for computing the ICV for C-ENC. Will pre-increment the counter.
|
||||||
* is_response=True for computing the ICV for R-DEC."""
|
* is_response=False for computing the ICV for R-DEC."""
|
||||||
if not is_response:
|
if not is_response:
|
||||||
self.block_nr += 1
|
self.block_nr += 1
|
||||||
# The binary value of this number SHALL be left padded with zeroes to form a full block.
|
# The binary value of this number SHALL be left padded with zeroes to form a full block.
|
||||||
|
|||||||
12
pySim/log.py
12
pySim/log.py
@@ -63,7 +63,7 @@ class PySimLogger:
|
|||||||
raise RuntimeError('static class, do not instantiate')
|
raise RuntimeError('static class, do not instantiate')
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def setup(print_callback = None, colors:dict = {}, verbose_debug:bool = False):
|
def setup(print_callback = None, colors:dict = {}):
|
||||||
"""
|
"""
|
||||||
Set a print callback function and color scheme. This function call is optional. In case this method is not
|
Set a print callback function and color scheme. This function call is optional. In case this method is not
|
||||||
called, default settings apply.
|
called, default settings apply.
|
||||||
@@ -72,20 +72,10 @@ class PySimLogger:
|
|||||||
have the following format: print_callback(message:str)
|
have the following format: print_callback(message:str)
|
||||||
colors : An optional dict through which certain log levels can be assigned a color.
|
colors : An optional dict through which certain log levels can be assigned a color.
|
||||||
(e.g. {logging.WARN: YELLOW})
|
(e.g. {logging.WARN: YELLOW})
|
||||||
verbose_debug: Enable verbose logging and set the loglevel DEBUG when set to true. Otherwise the
|
|
||||||
non-verbose logging is used and the loglevel is set to INFO. This setting can be changed
|
|
||||||
using the set_verbose and set_level methods at any time.
|
|
||||||
"""
|
"""
|
||||||
PySimLogger.print_callback = print_callback
|
PySimLogger.print_callback = print_callback
|
||||||
PySimLogger.colors = colors
|
PySimLogger.colors = colors
|
||||||
|
|
||||||
if (verbose_debug):
|
|
||||||
PySimLogger.set_verbose(True)
|
|
||||||
PySimLogger.set_level(logging.DEBUG)
|
|
||||||
else:
|
|
||||||
PySimLogger.set_verbose(False)
|
|
||||||
PySimLogger.set_level(logging.INFO)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def set_verbose(verbose:bool = False):
|
def set_verbose(verbose:bool = False):
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -221,12 +221,12 @@ class OtaAlgoCrypt(OtaAlgo, abc.ABC):
|
|||||||
for subc in cls.__subclasses__():
|
for subc in cls.__subclasses__():
|
||||||
if subc.enum_name == otak.algo_crypt:
|
if subc.enum_name == otak.algo_crypt:
|
||||||
return subc(otak)
|
return subc(otak)
|
||||||
raise ValueError('No implementation for crypt algorithm %s' % otak.algo_crypt)
|
raise ValueError('No implementation for crypt algorithm %s' % otak.algo_auth)
|
||||||
|
|
||||||
class OtaAlgoAuth(OtaAlgo, abc.ABC):
|
class OtaAlgoAuth(OtaAlgo, abc.ABC):
|
||||||
def __init__(self, otak: OtaKeyset):
|
def __init__(self, otak: OtaKeyset):
|
||||||
if self.enum_name != otak.algo_auth:
|
if self.enum_name != otak.algo_auth:
|
||||||
raise ValueError('Cannot use algorithm %s with key for %s' % (self.enum_name, otak.algo_auth))
|
raise ValueError('Cannot use algorithm %s with key for %s' % (self.enum_name, otak.algo_crypt))
|
||||||
super().__init__(otak)
|
super().__init__(otak)
|
||||||
|
|
||||||
def sign(self, data:bytes) -> bytes:
|
def sign(self, data:bytes) -> bytes:
|
||||||
|
|||||||
10
pySim/sms.py
10
pySim/sms.py
@@ -169,14 +169,8 @@ class SMS_TPDU(abc.ABC):
|
|||||||
|
|
||||||
class SMS_DELIVER(SMS_TPDU):
|
class SMS_DELIVER(SMS_TPDU):
|
||||||
"""Representation of a SMS-DELIVER T-PDU. This is the Network to MS/UE (downlink) direction."""
|
"""Representation of a SMS-DELIVER T-PDU. This is the Network to MS/UE (downlink) direction."""
|
||||||
flags_construct = BitStruct('tp_rp'/Flag,
|
flags_construct = BitStruct('tp_rp'/Flag, 'tp_udhi'/Flag, 'tp_rp'/Flag, 'tp_sri'/Flag,
|
||||||
'tp_udhi'/Flag,
|
Padding(1), 'tp_mms'/Flag, 'tp_mti'/BitsInteger(2))
|
||||||
'tp_sri'/Flag,
|
|
||||||
Padding(1),
|
|
||||||
'tp_lp'/Flag,
|
|
||||||
'tp_mms'/Flag,
|
|
||||||
'tp_mti'/BitsInteger(2))
|
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
kwargs['tp_mti'] = 0
|
kwargs['tp_mti'] = 0
|
||||||
super().__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ class LinkBase(abc.ABC):
|
|||||||
self.sw_interpreter = sw_interpreter
|
self.sw_interpreter = sw_interpreter
|
||||||
self.apdu_tracer = apdu_tracer
|
self.apdu_tracer = apdu_tracer
|
||||||
self.proactive_handler = proactive_handler
|
self.proactive_handler = proactive_handler
|
||||||
self.apdu_strict = True
|
self.apdu_strict = False
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
|
|||||||
@@ -26,7 +26,6 @@ from smartcard.CardRequest import CardRequest
|
|||||||
from smartcard.Exceptions import NoCardException, CardRequestTimeoutException, CardConnectionException
|
from smartcard.Exceptions import NoCardException, CardRequestTimeoutException, CardConnectionException
|
||||||
from smartcard.System import readers
|
from smartcard.System import readers
|
||||||
from smartcard.ExclusiveConnectCardConnection import ExclusiveConnectCardConnection
|
from smartcard.ExclusiveConnectCardConnection import ExclusiveConnectCardConnection
|
||||||
from smartcard.ATR import ATR
|
|
||||||
|
|
||||||
from osmocom.utils import h2i, i2h, Hexstr
|
from osmocom.utils import h2i, i2h, Hexstr
|
||||||
|
|
||||||
@@ -81,25 +80,23 @@ class PcscSimLink(LinkBaseTpdu):
|
|||||||
|
|
||||||
def connect(self):
|
def connect(self):
|
||||||
try:
|
try:
|
||||||
# To avoid leakage of resources, make sure the reader is disconnected
|
# To avoid leakage of resources, make sure the reader
|
||||||
|
# is disconnected
|
||||||
self.disconnect()
|
self.disconnect()
|
||||||
|
|
||||||
# Make card connection and select a suitable communication protocol
|
# Make card connection and select a suitable communication protocol
|
||||||
# (Even though pyscard provides an automatic protocol selection, we will make an independent decision
|
|
||||||
# based on the ATR. There are two reasons for that:
|
|
||||||
# 1) In case a card supports T=0 and T=1, we perfer to use T=0.
|
|
||||||
# 2) The automatic protocol selection may be unreliabe on some platforms
|
|
||||||
# see also: https://osmocom.org/issues/6952)
|
|
||||||
self._con.connect()
|
self._con.connect()
|
||||||
atr = ATR(self._con.getATR())
|
supported_protocols = self._con.getProtocol();
|
||||||
if atr.isT0Supported():
|
self.disconnect()
|
||||||
self._con.setProtocol(CardConnection.T0_protocol)
|
if (supported_protocols & CardConnection.T0_protocol):
|
||||||
|
protocol = CardConnection.T0_protocol
|
||||||
self.set_tpdu_format(0)
|
self.set_tpdu_format(0)
|
||||||
elif atr.isT1Supported():
|
elif (supported_protocols & CardConnection.T1_protocol):
|
||||||
self._con.setProtocol(CardConnection.T1_protocol)
|
protocol = CardConnection.T1_protocol
|
||||||
self.set_tpdu_format(1)
|
self.set_tpdu_format(1)
|
||||||
else:
|
else:
|
||||||
raise ReaderError('Unsupported card protocol')
|
raise ReaderError('Unsupported card protocol')
|
||||||
|
self._con.connect(protocol)
|
||||||
except CardConnectionException as exc:
|
except CardConnectionException as exc:
|
||||||
raise ProtocolError() from exc
|
raise ProtocolError() from exc
|
||||||
except NoCardException as exc:
|
except NoCardException as exc:
|
||||||
|
|||||||
@@ -1058,7 +1058,7 @@ class EF_OCSGL(LinFixedEF):
|
|||||||
# TS 31.102 Section 4.4.11.2 (Rel 15)
|
# TS 31.102 Section 4.4.11.2 (Rel 15)
|
||||||
class EF_5GS3GPPLOCI(TransparentEF):
|
class EF_5GS3GPPLOCI(TransparentEF):
|
||||||
def __init__(self, fid='4f01', sfid=0x01, name='EF.5GS3GPPLOCI', size=(20, 20),
|
def __init__(self, fid='4f01', sfid=0x01, name='EF.5GS3GPPLOCI', size=(20, 20),
|
||||||
desc='5GS 3GPP location information', **kwargs):
|
desc='5S 3GP location information', **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, **kwargs)
|
||||||
upd_status_constr = Enum(
|
upd_status_constr = Enum(
|
||||||
Byte, updated=0, not_updated=1, roaming_not_allowed=2)
|
Byte, updated=0, not_updated=1, roaming_not_allowed=2)
|
||||||
@@ -1326,7 +1326,7 @@ class EF_5G_PROSE_UIR(TransparentEF):
|
|||||||
pass
|
pass
|
||||||
class FiveGDdnmfCtfAddrForUploading(BER_TLV_IE, tag=0x97):
|
class FiveGDdnmfCtfAddrForUploading(BER_TLV_IE, tag=0x97):
|
||||||
pass
|
pass
|
||||||
class ProSeConfigDataForUsageInfoReporting(BER_TLV_IE, tag=0xa0,
|
class ProSeConfigDataForUeToNetworkRelayUE(BER_TLV_IE, tag=0xa0,
|
||||||
nested=[EF_5G_PROSE_DD.ValidityTimer,
|
nested=[EF_5G_PROSE_DD.ValidityTimer,
|
||||||
CollectionPeriod, ReportingWindow,
|
CollectionPeriod, ReportingWindow,
|
||||||
ReportingIndicators,
|
ReportingIndicators,
|
||||||
@@ -1336,7 +1336,7 @@ class EF_5G_PROSE_UIR(TransparentEF):
|
|||||||
desc='5G ProSe configuration data for usage information reporting', **kwargs):
|
desc='5G ProSe configuration data for usage information reporting', **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, **kwargs)
|
||||||
# contains TLV structure despite being TransparentEF, not BER-TLV ?!?
|
# contains TLV structure despite being TransparentEF, not BER-TLV ?!?
|
||||||
self._tlv = EF_5G_PROSE_UIR.ProSeConfigDataForUsageInfoReporting
|
self._tlv = EF_5G_PROSE_UIR.ProSeConfigDataForUeToNetworkRelayUE
|
||||||
|
|
||||||
# TS 31.102 Section 4.4.13.8 (Rel 18)
|
# TS 31.102 Section 4.4.13.8 (Rel 18)
|
||||||
class EF_5G_PROSE_U2URU(TransparentEF):
|
class EF_5G_PROSE_U2URU(TransparentEF):
|
||||||
|
|||||||
@@ -261,26 +261,6 @@ class EF_SMSP(LinFixedEF):
|
|||||||
"numbering_plan_id": "reserved_for_extension" },
|
"numbering_plan_id": "reserved_for_extension" },
|
||||||
"call_number": "" },
|
"call_number": "" },
|
||||||
"tp_pid": b"\x00", "tp_dcs": b"\x00", "tp_vp_minutes": 1440 } ),
|
"tp_pid": b"\x00", "tp_dcs": b"\x00", "tp_vp_minutes": 1440 } ),
|
||||||
( 'fffffffffffffffffffffffffffffffffffffffffffffffffdffffffffffffffffffffffff07919403214365f7ffffffffffffff',
|
|
||||||
{ "alpha_id": "", "parameter_indicators": { "tp_dest_addr": False, "tp_sc_addr": True,
|
|
||||||
"tp_pid": False, "tp_dcs": False, "tp_vp": False },
|
|
||||||
"tp_dest_addr": { "length": 255, "ton_npi": { "ext": True, "type_of_number": "reserved_for_extension",
|
|
||||||
"numbering_plan_id": "reserved_for_extension" },
|
|
||||||
"call_number": "" },
|
|
||||||
"tp_sc_addr": { "length": 7, "ton_npi": { "ext": True, "type_of_number": "international",
|
|
||||||
"numbering_plan_id": "isdn_e164" },
|
|
||||||
"call_number": "49301234567" },
|
|
||||||
"tp_pid": b"\xff", "tp_dcs": b"\xff", "tp_vp_minutes": 635040 } ),
|
|
||||||
( 'fffffffffffffffffffffffffffffffffffffffffffffffffc0b919403214365f7ffffffff07919403214365f7ffffffffffffff',
|
|
||||||
{ "alpha_id": "", "parameter_indicators": { "tp_dest_addr": True, "tp_sc_addr": True,
|
|
||||||
"tp_pid": False, "tp_dcs": False, "tp_vp": False },
|
|
||||||
"tp_dest_addr": { "length": 11, "ton_npi": { "ext": True, "type_of_number": "international",
|
|
||||||
"numbering_plan_id": "isdn_e164" },
|
|
||||||
"call_number": "49301234567" },
|
|
||||||
"tp_sc_addr": { "length": 7, "ton_npi": { "ext": True, "type_of_number": "international",
|
|
||||||
"numbering_plan_id": "isdn_e164" },
|
|
||||||
"call_number": "49301234567" },
|
|
||||||
"tp_pid": b"\xff", "tp_dcs": b"\xff", "tp_vp_minutes": 635040 } ),
|
|
||||||
]
|
]
|
||||||
_test_no_pad = True
|
_test_no_pad = True
|
||||||
class ValidityPeriodAdapter(Adapter):
|
class ValidityPeriodAdapter(Adapter):
|
||||||
@@ -309,28 +289,16 @@ class EF_SMSP(LinFixedEF):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def sc_addr_len(ctx):
|
def sc_addr_len(ctx):
|
||||||
"""Compute the length field for an address field (see also: 3GPP TS 24.011, section 8.2.5.2)."""
|
"""Compute the length field for an address field (like TP-DestAddr or TP-ScAddr)."""
|
||||||
if not hasattr(ctx, 'call_number') or len(ctx.call_number) == 0:
|
if not hasattr(ctx, 'call_number') or len(ctx.call_number) == 0:
|
||||||
return 0xff
|
return 0xff
|
||||||
else:
|
else:
|
||||||
# octets required for the call_number + one octet for ton_npi
|
|
||||||
return bytes_for_nibbles(len(ctx.call_number)) + 1
|
return bytes_for_nibbles(len(ctx.call_number)) + 1
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def dest_addr_len(ctx):
|
|
||||||
"""Compute the length field for an address field (see also: 3GPP TS 23.040, section 9.1.2.5)."""
|
|
||||||
if not hasattr(ctx, 'call_number') or len(ctx.call_number) == 0:
|
|
||||||
return 0xff
|
|
||||||
else:
|
|
||||||
# number of call_number digits
|
|
||||||
return len(ctx.call_number)
|
|
||||||
|
|
||||||
def __init__(self, fid='6f42', sfid=None, name='EF.SMSP', desc='Short message service parameters', **kwargs):
|
def __init__(self, fid='6f42', sfid=None, name='EF.SMSP', desc='Short message service parameters', **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, rec_len=(28, None), **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, rec_len=(28, None), **kwargs)
|
||||||
ScAddr = Struct('length'/Rebuild(Int8ub, lambda ctx: EF_SMSP.sc_addr_len(ctx)),
|
ScAddr = Struct('length'/Rebuild(Int8ub, lambda ctx: EF_SMSP.sc_addr_len(ctx)),
|
||||||
'ton_npi'/TonNpi, 'call_number'/PaddedBcdAdapter(Rpad(Bytes(10))))
|
'ton_npi'/TonNpi, 'call_number'/PaddedBcdAdapter(Rpad(Bytes(10))))
|
||||||
DestAddr = Struct('length'/Rebuild(Int8ub, lambda ctx: EF_SMSP.dest_addr_len(ctx)),
|
|
||||||
'ton_npi'/TonNpi, 'call_number'/PaddedBcdAdapter(Rpad(Bytes(10))))
|
|
||||||
self._construct = Struct('alpha_id'/COptional(GsmOrUcs2Adapter(Rpad(Bytes(this._.total_len-28)))),
|
self._construct = Struct('alpha_id'/COptional(GsmOrUcs2Adapter(Rpad(Bytes(this._.total_len-28)))),
|
||||||
'parameter_indicators'/InvertAdapter(BitStruct(
|
'parameter_indicators'/InvertAdapter(BitStruct(
|
||||||
Const(7, BitsInteger(3)),
|
Const(7, BitsInteger(3)),
|
||||||
@@ -339,8 +307,9 @@ class EF_SMSP(LinFixedEF):
|
|||||||
'tp_pid'/Flag,
|
'tp_pid'/Flag,
|
||||||
'tp_sc_addr'/Flag,
|
'tp_sc_addr'/Flag,
|
||||||
'tp_dest_addr'/Flag)),
|
'tp_dest_addr'/Flag)),
|
||||||
'tp_dest_addr'/DestAddr,
|
'tp_dest_addr'/ScAddr,
|
||||||
'tp_sc_addr'/ScAddr,
|
'tp_sc_addr'/ScAddr,
|
||||||
|
|
||||||
'tp_pid'/Bytes(1),
|
'tp_pid'/Bytes(1),
|
||||||
'tp_dcs'/Bytes(1),
|
'tp_dcs'/Bytes(1),
|
||||||
'tp_vp_minutes'/EF_SMSP.ValidityPeriodAdapter(Byte))
|
'tp_vp_minutes'/EF_SMSP.ValidityPeriodAdapter(Byte))
|
||||||
@@ -420,7 +389,7 @@ class DF_TELECOM(CardDF):
|
|||||||
# TS 51.011 Section 10.3.1
|
# TS 51.011 Section 10.3.1
|
||||||
class EF_LP(TransRecEF):
|
class EF_LP(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( "24", ["24"] ),
|
( "24", "24"),
|
||||||
]
|
]
|
||||||
def __init__(self, fid='6f05', sfid=None, name='EF.LP', size=(1, None), rec_len=1,
|
def __init__(self, fid='6f05', sfid=None, name='EF.LP', size=(1, None), rec_len=1,
|
||||||
desc='Language Preference'):
|
desc='Language Preference'):
|
||||||
@@ -477,8 +446,8 @@ class EF_IMSI(TransparentEF):
|
|||||||
# TS 51.011 Section 10.3.4
|
# TS 51.011 Section 10.3.4
|
||||||
class EF_PLMNsel(TransRecEF):
|
class EF_PLMNsel(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( "22F860", [{ "mcc": "228", "mnc": "06" }] ),
|
( "22F860", { "mcc": "228", "mnc": "06" } ),
|
||||||
( "330420", [{ "mcc": "334", "mnc": "020" }] ),
|
( "330420", { "mcc": "334", "mnc": "020" } ),
|
||||||
]
|
]
|
||||||
def __init__(self, fid='6f30', sfid=None, name='EF.PLMNsel', desc='PLMN selector',
|
def __init__(self, fid='6f30', sfid=None, name='EF.PLMNsel', desc='PLMN selector',
|
||||||
size=(24, None), rec_len=3, **kwargs):
|
size=(24, None), rec_len=3, **kwargs):
|
||||||
@@ -692,7 +661,7 @@ class EF_AD(TransparentEF):
|
|||||||
# TS 51.011 Section 10.3.20 / 10.3.22
|
# TS 51.011 Section 10.3.20 / 10.3.22
|
||||||
class EF_VGCS(TransRecEF):
|
class EF_VGCS(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( "92f9ffff", ["299"] ),
|
( "92f9ffff", "299" ),
|
||||||
]
|
]
|
||||||
def __init__(self, fid='6fb1', sfid=None, name='EF.VGCS', size=(4, 200), rec_len=4,
|
def __init__(self, fid='6fb1', sfid=None, name='EF.VGCS', size=(4, 200), rec_len=4,
|
||||||
desc='Voice Group Call Service', **kwargs):
|
desc='Voice Group Call Service', **kwargs):
|
||||||
@@ -828,9 +797,9 @@ class EF_LOCIGPRS(TransparentEF):
|
|||||||
# TS 51.011 Section 10.3.35..37
|
# TS 51.011 Section 10.3.35..37
|
||||||
class EF_xPLMNwAcT(TransRecEF):
|
class EF_xPLMNwAcT(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( '62F2104000', [{ "mcc": "262", "mnc": "01", "act": [ "E-UTRAN NB-S1", "E-UTRAN WB-S1" ] }] ),
|
( '62F2104000', { "mcc": "262", "mnc": "01", "act": [ "E-UTRAN NB-S1", "E-UTRAN WB-S1" ] } ),
|
||||||
( '62F2108000', [{ "mcc": "262", "mnc": "01", "act": [ "UTRAN" ] }] ),
|
( '62F2108000', { "mcc": "262", "mnc": "01", "act": [ "UTRAN" ] } ),
|
||||||
( '62F220488C', [{ "mcc": "262", "mnc": "02", "act": ['E-UTRAN NB-S1', 'E-UTRAN WB-S1', 'EC-GSM-IoT', 'GSM', 'NG-RAN'] }] ),
|
( '62F220488C', { "mcc": "262", "mnc": "02", "act": ['E-UTRAN NB-S1', 'E-UTRAN WB-S1', 'EC-GSM-IoT', 'GSM', 'NG-RAN'] } ),
|
||||||
]
|
]
|
||||||
def __init__(self, fid='1234', sfid=None, name=None, desc=None, size=(40, None), rec_len=5, **kwargs):
|
def __init__(self, fid='1234', sfid=None, name=None, desc=None, size=(40, None), rec_len=5, **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, rec_len=rec_len, **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, rec_len=rec_len, **kwargs)
|
||||||
@@ -1065,10 +1034,9 @@ class EF_ICCID(TransparentEF):
|
|||||||
# TS 102 221 Section 13.3 / TS 31.101 Section 13 / TS 51.011 Section 10.1.2
|
# TS 102 221 Section 13.3 / TS 31.101 Section 13 / TS 51.011 Section 10.1.2
|
||||||
class EF_PL(TransRecEF):
|
class EF_PL(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( '6465', ["de"] ),
|
( '6465', "de" ),
|
||||||
( '656e', ["en"] ),
|
( '656e', "en" ),
|
||||||
( 'ffff', [None] ),
|
( 'ffff', None ),
|
||||||
( '656e64657275ffffffff', ["en", "de", "ru", None, None] ),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self, fid='2f05', sfid=0x05, name='EF.PL', desc='Preferred Languages'):
|
def __init__(self, fid='2f05', sfid=0x05, name='EF.PL', desc='Preferred Languages'):
|
||||||
@@ -1149,8 +1117,8 @@ class DF_GSM(CardDF):
|
|||||||
EF_MBI(),
|
EF_MBI(),
|
||||||
EF_MWIS(),
|
EF_MWIS(),
|
||||||
EF_CFIS(),
|
EF_CFIS(),
|
||||||
EF_EXT('6fc8', None, 'EF.EXT6', desc='Extension6 (MBDN)'),
|
EF_EXT('6fc8', None, 'EF.EXT6', desc='Externsion6 (MBDN)'),
|
||||||
EF_EXT('6fcc', None, 'EF.EXT7', desc='Extension7 (CFIS)'),
|
EF_EXT('6fcc', None, 'EF.EXT7', desc='Externsion7 (CFIS)'),
|
||||||
EF_SPDI(),
|
EF_SPDI(),
|
||||||
EF_MMSN(),
|
EF_MMSN(),
|
||||||
EF_EXT('6fcf', None, 'EF.EXT8', desc='Extension8 (MMSN)'),
|
EF_EXT('6fcf', None, 'EF.EXT8', desc='Extension8 (MMSN)'),
|
||||||
|
|||||||
@@ -139,6 +139,7 @@ def enc_plmn(mcc: Hexstr, mnc: Hexstr) -> Hexstr:
|
|||||||
|
|
||||||
def dec_plmn(threehexbytes: Hexstr) -> dict:
|
def dec_plmn(threehexbytes: Hexstr) -> dict:
|
||||||
res = {'mcc': "0", 'mnc': "0"}
|
res = {'mcc': "0", 'mnc': "0"}
|
||||||
|
dec_mcc_from_plmn_str(threehexbytes)
|
||||||
res['mcc'] = dec_mcc_from_plmn_str(threehexbytes)
|
res['mcc'] = dec_mcc_from_plmn_str(threehexbytes)
|
||||||
res['mnc'] = dec_mnc_from_plmn_str(threehexbytes)
|
res['mnc'] = dec_mnc_from_plmn_str(threehexbytes)
|
||||||
return res
|
return res
|
||||||
@@ -910,8 +911,7 @@ class DataObjectCollection:
|
|||||||
def encode(self, decoded) -> bytes:
|
def encode(self, decoded) -> bytes:
|
||||||
res = bytearray()
|
res = bytearray()
|
||||||
for i in decoded:
|
for i in decoded:
|
||||||
name = i[0]
|
obj = self.members_by_name(i[0])
|
||||||
obj = self.members_by_name[name]
|
|
||||||
res.append(obj.to_tlv())
|
res.append(obj.to_tlv())
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|||||||
@@ -2200,9 +2200,9 @@ update_record 6 fe0112ffb53e96e5ff99731d51ad7beafd0e23ffffffffffffffffffffffffff
|
|||||||
update_record 7 fe02101da012f436d06824ecdd15050419ff9affffffffffffffffffffffffffffffff
|
update_record 7 fe02101da012f436d06824ecdd15050419ff9affffffffffffffffffffffffffffffff
|
||||||
update_record 8 fe02116929a373388ac904aff57ff57f6b3431ffffffffffffffffffffffffffffffff
|
update_record 8 fe02116929a373388ac904aff57ff57f6b3431ffffffffffffffffffffffffffffffff
|
||||||
update_record 9 fe0212a99245a5dc814e2f4c1aa908e9946e03ffffffffffffffffffffffffffffffff
|
update_record 9 fe0212a99245a5dc814e2f4c1aa908e9946e03ffffffffffffffffffffffffffffffff
|
||||||
update_record 10 fe03601111111111111111111111111111111111111111111111111111111111111111
|
update_record 10 fe0310521312c05a9aea93d70d44405172a580ffffffffffffffffffffffffffffffff
|
||||||
update_record 11 fe03612222222222222222222222222222222222222222222222222222222222222222
|
update_record 11 fe0311a9e45c72d45abde7db74261ee0c11b1bffffffffffffffffffffffffffffffff
|
||||||
update_record 12 fe03623333333333333333333333333333333333333333333333333333333333333333
|
update_record 12 fe0312867ba36b5873d60ea8b2cdcf3c0ddddaffffffffffffffffffffffffffffffff
|
||||||
#
|
#
|
||||||
################################################################################
|
################################################################################
|
||||||
# MF/DF.SYSTEM/EF.SIM_AUTH_COUNTER #
|
# MF/DF.SYSTEM/EF.SIM_AUTH_COUNTER #
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
INFO: Using PC/SC reader interface
|
Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: Fairwaves-SIM
|
Autodetected card type: Fairwaves-SIM
|
||||||
ICCID: 8988219000000117833
|
ICCID: 8988219000000117833
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
INFO: Using PC/SC reader interface
|
Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: Wavemobile-SIM
|
Autodetected card type: Wavemobile-SIM
|
||||||
ICCID: 89445310150011013678
|
ICCID: 89445310150011013678
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
INFO: Using PC/SC reader interface
|
Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: fakemagicsim
|
Autodetected card type: fakemagicsim
|
||||||
ICCID: 1122334455667788990
|
ICCID: 1122334455667788990
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
INFO: Using PC/SC reader interface
|
Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: sysmoISIM-SJA2
|
Autodetected card type: sysmoISIM-SJA2
|
||||||
ICCID: 8988211000000467343
|
ICCID: 8988211000000467343
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
INFO: Using PC/SC reader interface
|
Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: sysmoISIM-SJA5
|
Autodetected card type: sysmoISIM-SJA5
|
||||||
ICCID: 8949440000001155314
|
ICCID: 8949440000001155314
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
INFO: Using PC/SC reader interface
|
Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: sysmoUSIM-SJS1
|
Autodetected card type: sysmoUSIM-SJS1
|
||||||
ICCID: 8988211320300000028
|
ICCID: 8988211320300000028
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
INFO: Using PC/SC reader interface
|
Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: sysmosim-gr1
|
Autodetected card type: sysmosim-gr1
|
||||||
ICCID: 2222334455667788990
|
ICCID: 2222334455667788990
|
||||||
|
|||||||
9
tests/pySim-smpp2sim_test/pySim-smpp2sim_test.cfg
Normal file
9
tests/pySim-smpp2sim_test/pySim-smpp2sim_test.cfg
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# Card parameter:
|
||||||
|
ICCID="8949440000001155314"
|
||||||
|
KIC='51D4FC44BCBA7C4589DFADA3297720AF'
|
||||||
|
KID='0449699C472CE71E2FB7B56245EF7684'
|
||||||
|
|
||||||
|
# Testcase: Send OTA-SMS that selects DF.GSM and returns the select response
|
||||||
|
TAR='B00010'
|
||||||
|
APDU='A0A40000027F20A0C0000016'
|
||||||
|
EXPECTED_RESPONSE='0000ffff7f2002000000000009b106350400838a838a 9000'
|
||||||
@@ -20,14 +20,13 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
PYSIM_SHELL=./pySim-shell.py
|
|
||||||
PYSIM_SHELL_LOG=./pySim-shell.log
|
|
||||||
PYSIM_SMPP2SIM=./pySim-smpp2sim.py
|
PYSIM_SMPP2SIM=./pySim-smpp2sim.py
|
||||||
PYSIM_SMPP2SIM_LOG=./pySim-smpp2sim.log
|
PYSIM_SMPP2SIM_LOG=./pySim-smpp2sim.log
|
||||||
PYSIM_SMPP2SIM_PORT=2775
|
PYSIM_SMPP2SIM_PORT=2775
|
||||||
PYSIM_SMPP2SIM_TIMEOUT=10
|
PYSIM_SMPP2SIM_TIMEOUT=10
|
||||||
PYSIM_SMPPOTATOOL=./contrib/smpp-ota-tool.py
|
PYSIM_SMPPOTATOOL=./contrib/smpp-ota-tool.py
|
||||||
PYSIM_SMPPOTATOOL_LOG=./smpp-ota-tool.log
|
PYSIM_SMPPOTATOOL_LOG=./smpp-ota-tool.log
|
||||||
|
PYSIM_SHELL=./pySim-shell.py
|
||||||
|
|
||||||
function dump_logs {
|
function dump_logs {
|
||||||
echo ""
|
echo ""
|
||||||
@@ -45,11 +44,12 @@ function dump_logs {
|
|||||||
function send_test_request {
|
function send_test_request {
|
||||||
echo ""
|
echo ""
|
||||||
echo "Sending request to SMPP server:"
|
echo "Sending request to SMPP server:"
|
||||||
C_APDU=$1
|
TAR=$1
|
||||||
R_APDU_EXPECTED=$2
|
C_APDU=$2
|
||||||
|
R_APDU_EXPECTED=$3
|
||||||
|
|
||||||
echo "Sending: $C_APDU"
|
echo "Sending: $C_APDU"
|
||||||
COMMANDLINE="$PYSIM_SMPPOTATOOL --verbose --port $PYSIM_SMPP2SIM_PORT --kic $KIC --kid $KID --kic-idx $KEY_INDEX --kid-idx $KEY_INDEX --algo-crypt $ALGO_CRYPT --algo-auth $ALGO_AUTH --tar $TAR --apdu $C_APDU"
|
COMMANDLINE="$PYSIM_SMPPOTATOOL --verbose --port $PYSIM_SMPP2SIM_PORT --kic $KIC --kid $KID --tar $TAR --apdu $C_APDU"
|
||||||
echo "Commandline: $COMMANDLINE"
|
echo "Commandline: $COMMANDLINE"
|
||||||
R_APDU=`$COMMANDLINE 2> $PYSIM_SMPPOTATOOL_LOG`
|
R_APDU=`$COMMANDLINE 2> $PYSIM_SMPPOTATOOL_LOG`
|
||||||
if [ $? -ne 0 ]; then
|
if [ $? -ne 0 ]; then
|
||||||
@@ -57,7 +57,7 @@ function send_test_request {
|
|||||||
dump_logs
|
dump_logs
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
echo ""
|
|
||||||
echo "Got response from SMPP server:"
|
echo "Got response from SMPP server:"
|
||||||
echo "Sent: $C_APDU"
|
echo "Sent: $C_APDU"
|
||||||
echo "Received: $R_APDU"
|
echo "Received: $R_APDU"
|
||||||
@@ -68,14 +68,16 @@ function send_test_request {
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
echo "Response matches the expected response -- success!"
|
echo "Response matches the expected response -- success!"
|
||||||
|
echo ""
|
||||||
}
|
}
|
||||||
|
|
||||||
function start_smpp_server {
|
function start_smpp_server {
|
||||||
PCSC_READER=$1
|
PCSC_READER=$1
|
||||||
|
|
||||||
|
# Start the SMPP server
|
||||||
echo ""
|
echo ""
|
||||||
echo "Starting SMPP server:"
|
echo "Starting SMPP server:"
|
||||||
|
|
||||||
# Start the SMPP server
|
|
||||||
COMMANDLINE="$PYSIM_SMPP2SIM -p $PCSC_READER --smpp-bind-port $PYSIM_SMPP2SIM_PORT --apdu-trace"
|
COMMANDLINE="$PYSIM_SMPP2SIM -p $PCSC_READER --smpp-bind-port $PYSIM_SMPP2SIM_PORT --apdu-trace"
|
||||||
echo "Commandline: $COMMANDLINE"
|
echo "Commandline: $COMMANDLINE"
|
||||||
$COMMANDLINE > $PYSIM_SMPP2SIM_LOG 2>&1 &
|
$COMMANDLINE > $PYSIM_SMPP2SIM_LOG 2>&1 &
|
||||||
@@ -100,117 +102,55 @@ function start_smpp_server {
|
|||||||
echo "SMPP server reachable (port=$PYSIM_SMPP2SIM_PORT)"
|
echo "SMPP server reachable (port=$PYSIM_SMPP2SIM_PORT)"
|
||||||
}
|
}
|
||||||
|
|
||||||
function stop_smpp_server {
|
function find_card_by_iccid {
|
||||||
echo ""
|
# Find reader number of the card
|
||||||
echo "Stopping SMPP server:"
|
|
||||||
kill $PYSIM_SMPP2SIM_PID
|
|
||||||
echo "SMPP server stopped (PID=$PYSIM_SMPP2SIM_PID)"
|
|
||||||
trap EXIT
|
|
||||||
}
|
|
||||||
|
|
||||||
function find_card_by_iccid_or_eid {
|
|
||||||
ICCID=$1
|
ICCID=$1
|
||||||
EID=$2
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "Searching for card:"
|
echo "Searching for card:"
|
||||||
echo "ICCID: \"$ICCID\""
|
echo "ICCID: \"$ICCID\""
|
||||||
if [ -n "$EID" ]; then
|
|
||||||
echo "EID: \"$EID\""
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Determine number of available PCSC readers
|
|
||||||
PCSC_READER_COUNT=`pcsc_scan -rn | wc -l`
|
|
||||||
|
|
||||||
# In case an EID is set, search for a card with that EID first
|
|
||||||
if [ -n "$EID" ]; then
|
|
||||||
for PCSC_READER in $(seq 0 $(($PCSC_READER_COUNT-1))); do
|
|
||||||
echo "probing card (eID) in reader $PCSC_READER ..."
|
|
||||||
RESULT_JSON=`$PYSIM_SHELL -p $PCSC_READER --noprompt -e "select ADF.ISD-R" -e "get_eid" 2> /dev/null | tail -3`
|
|
||||||
echo $RESULT_JSON | grep $EID > /dev/null
|
|
||||||
if [ $? -eq 0 ]; then
|
|
||||||
echo "Found card (eID) in reader $PCSC_READER"
|
|
||||||
return $PCSC_READER
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Search for card with the given ICCID
|
|
||||||
if [ -z "$ICCID" ]; then
|
if [ -z "$ICCID" ]; then
|
||||||
echo "invalid ICCID, zero length ICCID is not allowed! -- abort"
|
echo "invalid ICCID, zero length ICCID is not allowed! -- abort"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
PCSC_READER_COUNT=`pcsc_scan -rn | wc -l`
|
||||||
for PCSC_READER in $(seq 0 $(($PCSC_READER_COUNT-1))); do
|
for PCSC_READER in $(seq 0 $(($PCSC_READER_COUNT-1))); do
|
||||||
echo "probing card (ICCID) in reader $PCSC_READER ..."
|
echo "probing card in reader $PCSC_READER ..."
|
||||||
RESULT_JSON=`$PYSIM_SHELL -p $PCSC_READER --noprompt -e "select EF.ICCID" -e "read_binary_decoded" 2> /dev/null | tail -3`
|
EF_ICCID_DECODED=`$PYSIM_SHELL -p $PCSC_READER --noprompt -e 'select EF.ICCID' -e 'read_binary_decoded --oneline' 2> /dev/null | tail -1`
|
||||||
echo $RESULT_JSON | grep $ICCID > /dev/null
|
echo $EF_ICCID_DECODED | grep $ICCID > /dev/null
|
||||||
if [ $? -eq 0 ]; then
|
if [ $? -eq 0 ]; then
|
||||||
echo "Found card (by ICCID) in reader $PCSC_READER"
|
echo "Found card in reader $PCSC_READER"
|
||||||
return $PCSC_READER
|
return $PCSC_READER
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|
||||||
echo "Card not found -- abort"
|
echo "Card with ICCID \"$ICCID\" not found -- abort"
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
function enable_profile {
|
|
||||||
PCSC_READER=$1
|
|
||||||
ICCID=$2
|
|
||||||
EID=$3
|
|
||||||
if [ -z "$EID" ]; then
|
|
||||||
# This is no eUICC, nothing to enable
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if the profile is already enabled
|
|
||||||
RESULT_JSON=`$PYSIM_SHELL -p $PCSC_READER --noprompt -e "select EF.ICCID" -e "read_binary_decoded" 2> /dev/null | tail -3`
|
|
||||||
ICCID_ENABLED=`echo $RESULT_JSON | jq -r '.iccid'`
|
|
||||||
if [ $ICCID != $ICCID_ENABLED ]; then
|
|
||||||
# Disable the currentle enabled profile
|
|
||||||
echo ""
|
|
||||||
echo "Disabeling currently enabled profile:"
|
|
||||||
echo "ICCID: \"$ICCID\""
|
|
||||||
RESULT_JSON=`$PYSIM_SHELL -p $PCSC_READER --noprompt -e "select ADF.ISD-R" -e "disable_profile --iccid $ICCID_ENABLED" 2> /dev/null | tail -3`
|
|
||||||
echo $RESULT_JSON | grep "ok" > /dev/null
|
|
||||||
if [ $? -ne 0 ]; then
|
|
||||||
echo "unable to disable profile with \"$ICCID_ENABLED\""
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
echo "profile disabled"
|
|
||||||
|
|
||||||
# Enable the profile we intend to test with
|
|
||||||
echo ""
|
|
||||||
echo "Enabeling profile:"
|
|
||||||
echo "ICCID: \"$ICCID\""
|
|
||||||
RESULT_JSON=`$PYSIM_SHELL -p $PCSC_READER --noprompt -e "select ADF.ISD-R" -e "enable_profile --iccid $ICCID" 2> /dev/null | tail -3`
|
|
||||||
echo $RESULT_JSON | grep "ok\|profileNotInDisabledState" > /dev/null
|
|
||||||
if [ $? -ne 0 ]; then
|
|
||||||
echo "unable to enable profile with \"$ICCID\""
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
echo "profile enabled"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
export PYTHONPATH=./
|
export PYTHONPATH=./
|
||||||
|
|
||||||
echo "pySim-smpp2sim_test - a test program to test pySim-smpp2sim.py"
|
echo "pySim-smpp2sim_test - a test program to test pySim-smpp2sim.py"
|
||||||
echo "=============================================================="
|
echo "=============================================================="
|
||||||
|
|
||||||
TESTCASE_DIR=`dirname $0`
|
# TODO: At the moment we can only have one card and one testcase. This is
|
||||||
for TEST_CONFIG_FILE in $TESTCASE_DIR/testcase_*.cfg ; do
|
# sufficient for now. We can extend this later as needed.
|
||||||
echo ""
|
|
||||||
echo "running testcase: $TEST_CONFIG_FILE"
|
# Read test parameters from config from file
|
||||||
. $TEST_CONFIG_FILE
|
TEST_CONFIG_FILE=${0%.*}.cfg
|
||||||
find_card_by_iccid_or_eid $ICCID $EID
|
echo "using config file: $TEST_CONFIG_FILE"
|
||||||
PCSC_READER=$?
|
if ! [ -e "$TEST_CONFIG_FILE" ]; then
|
||||||
enable_profile $PCSC_READER $ICCID $EID
|
echo "test configuration file does not exist! -- abort"
|
||||||
start_smpp_server $PCSC_READER
|
exit 1
|
||||||
send_test_request $APDU "$EXPECTED_RESPONSE"
|
fi
|
||||||
stop_smpp_server
|
. $TEST_CONFIG_FILE
|
||||||
echo ""
|
|
||||||
echo "testcase ok"
|
# Execute testcase
|
||||||
echo "--------------------------------------------------------------"
|
find_card_by_iccid $ICCID
|
||||||
done
|
start_smpp_server $?
|
||||||
|
send_test_request $TAR $APDU "$EXPECTED_RESPONSE"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
echo "done."
|
|
||||||
|
|||||||
@@ -1,17 +0,0 @@
|
|||||||
# Preparation:
|
|
||||||
# This testcase executes against a sysmoISIM-SJA5 card. For the testcase, the
|
|
||||||
# key configuration on the card may be used as it is.
|
|
||||||
|
|
||||||
# Card parameter:
|
|
||||||
ICCID="8949440000001155314" # <-- change to the ICCID of your card!
|
|
||||||
EID=""
|
|
||||||
KIC='51D4FC44BCBA7C4589DFADA3297720AF' # <-- change to the KIC1 of your card!
|
|
||||||
KID='0449699C472CE71E2FB7B56245EF7684' # <-- change to the KID1 of your card!
|
|
||||||
KEY_INDEX=1
|
|
||||||
ALGO_CRYPT=triple_des_cbc2
|
|
||||||
ALGO_AUTH=triple_des_cbc2
|
|
||||||
TAR='B00010'
|
|
||||||
|
|
||||||
# Testcase: Send OTA-SMS that selects DF.GSM and returns the select response
|
|
||||||
APDU='A0A40000027F20A0C0000016'
|
|
||||||
EXPECTED_RESPONSE='0000ffff7f2002000000000009b106350400838a838a 9000'
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
# Preparation:
|
|
||||||
# This testcase executes against a sysmoEUICC1-C2T, which is equipped with the
|
|
||||||
# TS48V1-B-UNIQUE test profile from https://test.rsp.sysmocom.de/ (Activation
|
|
||||||
# code: 1$smdpp.test.rsp.sysmocom.de$TS48V1-B-UNIQUE). This testprofile must be
|
|
||||||
# present on the eUICC before this testcase can be executed.
|
|
||||||
|
|
||||||
# Card parameter:
|
|
||||||
ICCID="8949449999999990031"
|
|
||||||
EID="89049044900000000000000000102355" # <-- change to the EID of your card!
|
|
||||||
KIC='66778899aabbccdd1122334455eeff10'
|
|
||||||
KID='112233445566778899aabbccddeeff10'
|
|
||||||
KEY_INDEX=2
|
|
||||||
ALGO_CRYPT=aes_cbc
|
|
||||||
ALGO_AUTH=aes_cmac
|
|
||||||
TAR='b00120'
|
|
||||||
|
|
||||||
# Testcase: Send OTA-SMS that selects DF.ICCID and returns the select response
|
|
||||||
APDU='00a40004022fe200C000001d'
|
|
||||||
EXPECTED_RESPONSE='621b8202412183022fe2a503d001408a01058b032f06038002000a8800 9000'
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
# Preparation:
|
|
||||||
# This testcase executes against a sysmoISIM-SJA5 card. Since this card model is
|
|
||||||
# shipped with a classic DES key configuration, it is necessary to provision
|
|
||||||
# AES128 test keys before this testcase may be executed. The the following
|
|
||||||
# pySim-shell command sequence may be used:
|
|
||||||
#
|
|
||||||
# verify_adm 34173960 # <-- change to the ADM key of your card!
|
|
||||||
# select /DF.SYSTEM/EF.0348_KEY
|
|
||||||
# update_record 10 fe03601111111111111111111111111111111111111111111111111111111111111111
|
|
||||||
# update_record 11 fe03612222222222222222222222222222222222222222222222222222222222222222
|
|
||||||
# update_record 12 fe03623333333333333333333333333333333333333333333333333333333333333333
|
|
||||||
#
|
|
||||||
# This overwrites one of the already existing 3DES SCP02 key (KVN 47) and replaces it
|
|
||||||
# with an AES256 SCP80 key (KVN 3).
|
|
||||||
|
|
||||||
# Card parameter:
|
|
||||||
ICCID="8949440000001155314" # <-- change to the ICCID of your card!
|
|
||||||
EID=""
|
|
||||||
KIC='1111111111111111111111111111111111111111111111111111111111111111'
|
|
||||||
KID='2222222222222222222222222222222222222222222222222222222222222222'
|
|
||||||
KEY_INDEX=3
|
|
||||||
ALGO_CRYPT=aes_cbc
|
|
||||||
ALGO_AUTH=aes_cmac
|
|
||||||
TAR='B00010'
|
|
||||||
|
|
||||||
# Testcase: Send OTA-SMS that selects DF.GSM and returns the select response
|
|
||||||
APDU='A0A40000027F20A0C0000016'
|
|
||||||
EXPECTED_RESPONSE='0000ffff7f2002000000000009b106350400838a838a 9000'
|
|
||||||
@@ -310,14 +310,11 @@ class ConfigurableParameterTest(unittest.TestCase):
|
|||||||
p13n.SdKeyScp80Kvn03DesDek,
|
p13n.SdKeyScp80Kvn03DesDek,
|
||||||
#p13n.SdKeyScp80Kvn03DesEnc,
|
#p13n.SdKeyScp80Kvn03DesEnc,
|
||||||
#p13n.SdKeyScp80Kvn03DesMac,
|
#p13n.SdKeyScp80Kvn03DesMac,
|
||||||
#p13n.SdKeyScp81Kvn40AesDek,
|
p13n.SdKeyScp81Kvn40Dek ,
|
||||||
p13n.SdKeyScp81Kvn40DesDek,
|
|
||||||
#p13n.SdKeyScp81Kvn40Tlspsk,
|
#p13n.SdKeyScp81Kvn40Tlspsk,
|
||||||
#p13n.SdKeyScp81Kvn41AesDek,
|
#p13n.SdKeyScp81Kvn41Dek ,
|
||||||
#p13n.SdKeyScp81Kvn41DesDek,
|
|
||||||
p13n.SdKeyScp81Kvn41Tlspsk,
|
p13n.SdKeyScp81Kvn41Tlspsk,
|
||||||
#p13n.SdKeyScp81Kvn42AesDek,
|
#p13n.SdKeyScp81Kvn42Dek ,
|
||||||
#p13n.SdKeyScp81Kvn42DesDek,
|
|
||||||
#p13n.SdKeyScp81Kvn42Tlspsk,
|
#p13n.SdKeyScp81Kvn42Tlspsk,
|
||||||
):
|
):
|
||||||
|
|
||||||
|
|||||||
@@ -176,11 +176,12 @@ class TransRecEF_Test(unittest.TestCase):
|
|||||||
|
|
||||||
|
|
||||||
def test_de_encode_record(self):
|
def test_de_encode_record(self):
|
||||||
"""Test the decoder and encoder for a transparent record-oriented EF at the whole-file
|
"""Test the decoder and encoder for a transparent record-oriented EF. Performs first a decoder
|
||||||
level. Performs first a decode test, then re-encodes and compares with the input.
|
test, and then re-encodes the decoded data, comparing the re-encoded data with the
|
||||||
|
initial input data.
|
||||||
|
|
||||||
Requires the given TransRecEF subclass to have a '_test_de_encode' attribute,
|
Requires the given TransRecEF subclass to have a '_test_de_encode' attribute,
|
||||||
containing a list of 2-tuples (hexstring, decoded_list).
|
containing a list of tuples. Each tuple has to be a 2-tuple (hexstring, decoded_dict).
|
||||||
"""
|
"""
|
||||||
for c in self.classes:
|
for c in self.classes:
|
||||||
name = get_qualified_name(c)
|
name = get_qualified_name(c)
|
||||||
@@ -191,12 +192,14 @@ class TransRecEF_Test(unittest.TestCase):
|
|||||||
encoded = t[0]
|
encoded = t[0]
|
||||||
decoded = t[1]
|
decoded = t[1]
|
||||||
logging.debug("Testing decode of %s", name)
|
logging.debug("Testing decode of %s", name)
|
||||||
re_dec = inst.decode_hex(encoded)
|
re_dec = inst.decode_record_hex(encoded)
|
||||||
self.assertEqual(decoded, re_dec)
|
self.assertEqual(decoded, re_dec)
|
||||||
# re-encode the decoded data
|
# re-encode the decoded data
|
||||||
logging.debug("Testing re-encode of %s", name)
|
logging.debug("Testing re-encode of %s", name)
|
||||||
re_enc = inst.encode_hex(re_dec, len(encoded)//2)
|
re_enc = inst.encode_record_hex(re_dec, len(encoded)//2)
|
||||||
self.assertEqual(encoded.upper(), re_enc.upper())
|
self.assertEqual(encoded.upper(), re_enc.upper())
|
||||||
|
# there's no point in testing padded input, as TransRecEF have a fixed record
|
||||||
|
# size and we cannot ever receive more input data than that size.
|
||||||
|
|
||||||
|
|
||||||
class TransparentEF_Test(unittest.TestCase):
|
class TransparentEF_Test(unittest.TestCase):
|
||||||
|
|||||||
@@ -1,144 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# (C) 2026 by sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
|
|
||||||
#
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 2 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
"""Verify that every CardProfile / CardApplication subclass with EF/DF content,
|
|
||||||
and every standalone CardDF subclass (one not reachable as a child of any profile
|
|
||||||
or application), is either listed in docs/pysim_fs_sphinx.py::SECTIONS or
|
|
||||||
explicitly EXCLUDED."""
|
|
||||||
|
|
||||||
import unittest
|
|
||||||
import importlib
|
|
||||||
import inspect
|
|
||||||
import pkgutil
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Make docs/pysim_fs_sphinx.py importable without a full Sphinx build.
|
|
||||||
_DOCS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', 'docs')
|
|
||||||
sys.path.insert(0, os.path.abspath(_DOCS_DIR))
|
|
||||||
|
|
||||||
import pySim # noqa: E402
|
|
||||||
from pySim.filesystem import CardApplication, CardDF, CardMF, CardADF # noqa: E402
|
|
||||||
from pySim.profile import CardProfile # noqa: E402
|
|
||||||
from pysim_fs_sphinx import EXCLUDED, SECTIONS # noqa: E402
|
|
||||||
|
|
||||||
|
|
||||||
class TestFsCoverage(unittest.TestCase):
    """Ensure SECTIONS + EXCLUDED together account for all classes with content.

    Walks every module below the pySim package, instantiates each concrete
    CardProfile / CardApplication subclass, and verifies that every class
    owning EFs/DFs is listed either in SECTIONS (documented) or in EXCLUDED
    (deliberately undocumented) of docs/pysim_fs_sphinx.py.  Standalone
    CardDF subclasses not reachable from any profile/application are
    checked separately.
    """

    # Base CardDF types that are not concrete filesystem objects on their own.
    _DF_BASE_TYPES = frozenset([CardDF, CardMF, CardADF])

    @staticmethod
    def _collect_reachable_df_types(obj) -> set:
        """Return the set of all CardDF *types* reachable as children of *obj*."""
        result = set()
        if isinstance(obj, CardProfile):
            children = obj.files_in_mf
        elif isinstance(obj, CardApplication):
            result.add(type(obj.adf))
            children = list(obj.adf.children.values())
        elif isinstance(obj, CardDF):
            children = list(obj.children.values())
        else:
            # not a filesystem container we know how to traverse
            return result
        # depth-first traversal; only CardDF instances are expanded further
        queue = list(children)
        while queue:
            child = queue.pop()
            if isinstance(child, CardDF):
                result.add(type(child))
                queue.extend(child.children.values())
        return result

    @staticmethod
    def _has_content(obj) -> bool:
        """Return True if *obj* owns any EFs/DFs."""
        if isinstance(obj, CardProfile):
            return bool(obj.files_in_mf)
        if isinstance(obj, CardApplication):
            return bool(obj.adf.children)
        return False

    def test_all_profiles_and_apps_covered(self):
        # build a set of (module, class-name) pairs that are already accounted for
        covered = {(mod, cls) for (_, mod, cls) in SECTIONS}
        accounted_for = covered | EXCLUDED

        uncovered = []
        reachable_df_types = set()
        loaded_modules = {}

        for modinfo in pkgutil.walk_packages(pySim.__path__, prefix='pySim.'):
            modname = modinfo.name
            try:
                module = importlib.import_module(modname)
            except Exception:  # skip import errors, if any
                continue
            loaded_modules[modname] = module

            for name, cls in inspect.getmembers(module, inspect.isclass):
                # skip classes that are merely imported by this module
                if cls.__module__ != modname:
                    continue
                # examine only subclasses of CardProfile and CardApplication
                if not issubclass(cls, (CardProfile, CardApplication)):
                    continue
                # skip the abstract base classes themselves
                if cls in (CardProfile, CardApplication):
                    continue
                # classes that require constructor arguments cannot be probed
                try:
                    obj = cls()
                except Exception:
                    continue

                # collect all CardDF types reachable from this profile/application
                # (used below to identify standalone DFs)
                reachable_df_types |= self._collect_reachable_df_types(obj)

                if self._has_content(obj) and (modname, name) not in accounted_for:
                    uncovered.append((modname, name))

        # check standalone CardDFs (such as DF.EIRENE or DF.SYSTEM)
        for modname, module in loaded_modules.items():
            for name, cls in inspect.getmembers(module, inspect.isclass):
                if cls.__module__ != modname:
                    continue
                if not issubclass(cls, CardDF):
                    continue
                if cls in self._DF_BASE_TYPES:
                    continue
                if cls in reachable_df_types:
                    continue
                try:
                    obj = cls()
                except Exception:
                    continue
                if obj.children and (modname, name) not in accounted_for:
                    uncovered.append((modname, name))

        if uncovered:
            lines = [
                'The following classes have EFs/DFs, but are not listed in SECTIONS or EXCLUDED:',
                *(f' {modname}.{name}' for modname, name in sorted(uncovered)),
                'Please modify docs/pysim_fs_sphinx.py accordingly',
            ]
            self.fail('\n'.join(lines))
|
|
||||||
|
|
||||||
|
|
||||||
# Allow running this test module directly, outside of a test runner.
if __name__ == '__main__':
    unittest.main()
|
|
||||||
@@ -295,7 +295,7 @@ class Install_param_Test(unittest.TestCase):
|
|||||||
load_parameters = gen_install_parameters(256, 256, '010001001505000000000000000000000000')
|
load_parameters = gen_install_parameters(256, 256, '010001001505000000000000000000000000')
|
||||||
self.assertEqual(load_parameters, 'c900ef1cc8020100c7020100ca12010001001505000000000000000000000000')
|
self.assertEqual(load_parameters, 'c900ef1cc8020100c7020100ca12010001001505000000000000000000000000')
|
||||||
|
|
||||||
load_parameters = gen_install_parameters()
|
load_parameters = gen_install_parameters(None, None, '')
|
||||||
self.assertEqual(load_parameters, 'c900')
|
self.assertEqual(load_parameters, 'c900')
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
Reference in New Issue
Block a user