forked from public/pysim
Compare commits
45 Commits
914abe3309
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f2567de387 | ||
|
|
6b5fa38f14 | ||
|
|
45220e00d5 | ||
|
|
5828c92c66 | ||
|
|
5e2fd148f8 | ||
|
|
fc932a2ee9 | ||
|
|
d5aa963caa | ||
|
|
19245d0d8b | ||
|
|
a786590906 | ||
|
|
ca8fada7b6 | ||
|
|
c995bb1ec2 | ||
|
|
ee06ab987f | ||
|
|
a1d3b8f5e8 | ||
|
|
f7b86e1920 | ||
|
|
2cfb0972df | ||
|
|
4215a3bfd3 | ||
|
|
b42d417bbe | ||
|
|
74ac191ae6 | ||
|
|
add4b991b7 | ||
|
|
8c81e2cdf9 | ||
|
|
d9d62ee729 | ||
|
|
c7e68e1281 | ||
|
|
969f9c0e4b | ||
|
|
2ef9abf23e | ||
|
|
473f31066c | ||
|
|
b59363b49e | ||
|
|
115b517c6a | ||
|
|
99aef1fecf | ||
|
|
caddd1c7a0 | ||
|
|
11a7a7e3b1 | ||
|
|
5138208ee6 | ||
|
|
5b2fabde62 | ||
|
|
24127e985a | ||
|
|
09ae327f8b | ||
|
|
d32bce19f6 | ||
|
|
83bfdc0d3b | ||
|
|
14ec52a06c | ||
|
|
209d13e233 | ||
|
|
3b50e64c8b | ||
|
|
b76cc80ea1 | ||
|
|
3b87ba3cba | ||
|
|
ea1d5af383 | ||
|
|
0634f77308 | ||
|
|
a5a5865c7c | ||
|
|
3752aeb94e |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -3,6 +3,7 @@
|
|||||||
|
|
||||||
/docs/_*
|
/docs/_*
|
||||||
/docs/generated
|
/docs/generated
|
||||||
|
/docs/filesystem.rst
|
||||||
/.cache
|
/.cache
|
||||||
/.local
|
/.local
|
||||||
/build
|
/build
|
||||||
|
|||||||
@@ -285,10 +285,7 @@ if __name__ == '__main__':
|
|||||||
option_parser.add_argument("--admin", action='store_true', help="perform action as admin", default=False)
|
option_parser.add_argument("--admin", action='store_true', help="perform action as admin", default=False)
|
||||||
opts = option_parser.parse_args()
|
opts = option_parser.parse_args()
|
||||||
|
|
||||||
PySimLogger.setup(print, {logging.WARN: "\033[33m"})
|
PySimLogger.setup(print, {logging.WARN: "\033[33m"}, opts.verbose)
|
||||||
if (opts.verbose):
|
|
||||||
PySimLogger.set_verbose(True)
|
|
||||||
PySimLogger.set_level(logging.DEBUG)
|
|
||||||
|
|
||||||
# Open CSV file
|
# Open CSV file
|
||||||
cr = open_csv(opts)
|
cr = open_csv(opts)
|
||||||
|
|||||||
@@ -305,16 +305,16 @@ the requested data.
|
|||||||
|
|
||||||
|
|
||||||
ADM PIN
|
ADM PIN
|
||||||
~~~~~~~
|
^^^^^^^
|
||||||
|
|
||||||
The `verify_adm` command will attempt to look up the `ADM1` column
|
The `verify_adm` command will attempt to look up the `ADM1` column
|
||||||
indexed by the ICCID of the SIM/UICC.
|
indexed by the ICCID of the SIM/UICC.
|
||||||
|
|
||||||
|
|
||||||
SCP02 / SCP03
|
SCP02 / SCP03
|
||||||
~~~~~~~~~~~~~
|
^^^^^^^^^^^^^
|
||||||
|
|
||||||
SCP02 and SCP03 each use key triplets consisting if ENC, MAC and DEK
|
SCP02 and SCP03 each use key triplets consisting of ENC, MAC and DEK
|
||||||
keys. For more details, see the applicable GlobalPlatform
|
keys. For more details, see the applicable GlobalPlatform
|
||||||
specifications.
|
specifications.
|
||||||
|
|
||||||
|
|||||||
26
docs/conf.py
26
docs/conf.py
@@ -13,6 +13,7 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
sys.path.insert(0, os.path.abspath('..'))
|
sys.path.insert(0, os.path.abspath('..'))
|
||||||
|
sys.path.insert(0, os.path.abspath('.')) # for local extensions (pysim_fs_sphinx, ...)
|
||||||
|
|
||||||
|
|
||||||
# -- Project information -----------------------------------------------------
|
# -- Project information -----------------------------------------------------
|
||||||
@@ -39,7 +40,8 @@ extensions = [
|
|||||||
"sphinx.ext.autodoc",
|
"sphinx.ext.autodoc",
|
||||||
"sphinxarg.ext",
|
"sphinxarg.ext",
|
||||||
"sphinx.ext.autosectionlabel",
|
"sphinx.ext.autosectionlabel",
|
||||||
"sphinx.ext.napoleon"
|
"sphinx.ext.napoleon",
|
||||||
|
"pysim_fs_sphinx",
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
@@ -64,3 +66,25 @@ html_theme = 'alabaster'
|
|||||||
html_static_path = ['_static']
|
html_static_path = ['_static']
|
||||||
|
|
||||||
autoclass_content = 'both'
|
autoclass_content = 'both'
|
||||||
|
|
||||||
|
# Mock optional server-side deps of es2p and http_json_api/es9p,
|
||||||
|
# so that autodoc can import and document those modules.
|
||||||
|
autodoc_mock_imports = ['klein', 'twisted']
|
||||||
|
|
||||||
|
# Workaround for duplicate label warnings:
|
||||||
|
# https://github.com/sphinx-doc/sphinx-argparse/issues/14
|
||||||
|
#
|
||||||
|
# sphinxarg.ext generates generic sub-headings ("Named arguments",
|
||||||
|
# "Positional arguments", "Sub-commands", "General options", ...) for every
|
||||||
|
# argparse command/tool. These repeat across many files and trigger tons
|
||||||
|
# of autosectionlabel duplicate-label warnings - suppress them.
|
||||||
|
autosectionlabel_maxdepth = 3
|
||||||
|
suppress_warnings = [
|
||||||
|
'autosectionlabel.filesystem',
|
||||||
|
'autosectionlabel.saip-tool',
|
||||||
|
'autosectionlabel.shell',
|
||||||
|
'autosectionlabel.smpp2sim',
|
||||||
|
'autosectionlabel.smpp-ota-tool',
|
||||||
|
'autosectionlabel.suci-keytool',
|
||||||
|
'autosectionlabel.trace',
|
||||||
|
]
|
||||||
|
|||||||
@@ -39,6 +39,7 @@ pySim consists of several parts:
|
|||||||
:caption: Contents:
|
:caption: Contents:
|
||||||
|
|
||||||
shell
|
shell
|
||||||
|
filesystem
|
||||||
trace
|
trace
|
||||||
legacy
|
legacy
|
||||||
smpp2sim
|
smpp2sim
|
||||||
|
|||||||
@@ -205,7 +205,7 @@ Specifically, pySim-read will dump the following:
|
|||||||
|
|
||||||
* DF.GSM
|
* DF.GSM
|
||||||
|
|
||||||
* EF,IMSI
|
* EF.IMSI
|
||||||
* EF.GID1
|
* EF.GID1
|
||||||
* EF.GID2
|
* EF.GID2
|
||||||
* EF.SMSP
|
* EF.SMSP
|
||||||
|
|||||||
836
docs/put_key-tutorial.rst
Normal file
836
docs/put_key-tutorial.rst
Normal file
@@ -0,0 +1,836 @@
|
|||||||
|
Guide: Managing GP Keys
|
||||||
|
=======================
|
||||||
|
|
||||||
|
Most of today's smartcards follow the GlobalPlatform Card Specification and the included Security Domain model.
|
||||||
|
UICCs and eUICCs are no exception here.
|
||||||
|
|
||||||
|
The Security Domain acts as an on-card representative of a card authority or administrator. It is used to perform tasks
|
||||||
|
like the installation of applications or the provisioning and rotation of secure channel keys. It also acts as a secure
|
||||||
|
key storage and offers all kinds of cryptographic services to applications that are installed under a specific
|
||||||
|
Security Domain (see also GlobalPlatform Card Specification, section 7).
|
||||||
|
|
||||||
|
In this tutorial, we will show how to work with the key material (keysets) stored inside a Security Domain and how to
|
||||||
|
rotate (replace) existing keys. We will also show how to provision new keys.
|
||||||
|
|
||||||
|
.. warning:: Making changes to keysets requires extreme caution as misconfigured keysets may lock you out permanently.
|
||||||
|
It's also strongly recommended to maintain at least one backup keyset that you can use as fallback in case
|
||||||
|
the primary keyset becomes unusable for some reason.
|
||||||
|
|
||||||
|
|
||||||
|
Selecting a Security Domain
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
A typical smartcard, such as an UICC will have one primary Security Domain, called the Issuer Security Domain (ISD).
|
||||||
|
When working with those cards, the ISD will show up in the UICC filesystem tree as `ADF.ISD` and can be selected like
|
||||||
|
any other file.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (00:MF)> select ADF.ISD
|
||||||
|
{
|
||||||
|
"application_id": "a000000003000000",
|
||||||
|
"proprietary_data": {
|
||||||
|
"maximum_length_of_data_field_in_command_message": 255
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
When working with eUICCs, multiple Security Domains are involved. The model is fundamentally different from the classic
|
||||||
|
model with one primary Security Domain (ISD). In the case of eUICCs, an ISD-R (Issuer Security Domain - Root) and an
|
||||||
|
ISD-P (Issuer Security Domain - Profile) exist (see also: GSMA SGP.02, section 2.2.1).
|
||||||
|
|
||||||
|
The ISD-P is established by the ISD-R during the profile installation and serves as a secure container for an eSIM
|
||||||
|
profile. Within the ISD-P the eSIM profile establishes a dedicated Security Domain called `MNO-SD` (see also GSMA
|
||||||
|
SGP.02, section 2.2.4). This `MNO-SD` is comparable to the Issuer Security Domain (ISD) we find on UICCs. The AID of
|
||||||
|
`MNO-SD` is either the default AID for the Issuer Security Domain (see also GlobalPlatform, section H.1.3) or a
|
||||||
|
different value specified by the provider of the eSIM profile.
|
||||||
|
|
||||||
|
Since the AID of the `MNO-SD` is not a fixed value, it is not known by `pySim-shell`. This means there will be no
|
||||||
|
`ADF.ISD` file shown in the file system, but we can simply select the `ADF.ISD-R` first and then select the `MNO-SD`
|
||||||
|
using a raw APDU. In the following example we assume that the default AID (``a000000151000000``) is used. The APDU
|
||||||
|
would look like this: ``00a4040408`` + ``a000000151000000`` + ``00``
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (00:MF)> select ADF.ISD-R
|
||||||
|
{
|
||||||
|
"application_id": "a0000005591010ffffffff8900000100",
|
||||||
|
"proprietary_data": {
|
||||||
|
"maximum_length_of_data_field_in_command_message": 255
|
||||||
|
},
|
||||||
|
"isdr_proprietary_application_template": {
|
||||||
|
"supported_version_number": "020300"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pySIM-shell (00:MF/ADF.ISD-R)> apdu 00a4040408a00000015100000000
|
||||||
|
SW: 9000, RESP: 6f108408a000000151000000a5049f6501ff
|
||||||
|
|
||||||
|
After that, the prompt will still show the `ADF.ISD-R`, but we are actually in `ADF.ISD` and the standard GlobalPlatform
|
||||||
|
operations like `establish_scpXX`, `get_data`, and `put_key` should work. By doing this, we simply have tricked
|
||||||
|
`pySim-shell` into making the GlobalPlatform related commands available for some other Security Domain we are not
|
||||||
|
interested in. With the raw APDU we then have swapped out the Security Domain under the hood. The same workaround can
|
||||||
|
be applied to any Security Domain, provided that the AID is known to the user.
|
||||||
|
|
||||||
|
|
||||||
|
Establishing a secure channel
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Before we can make changes to the keysets in the currently selected Security Domain we must first establish a secure
|
||||||
|
channel with that Security Domain. In the following examples we will use `SCP02` (see also GlobalPlatform Card
|
||||||
|
Specification, section E.1.1) and `SCP03` (see also GlobalPlatform Card Specification – Amendment D) to establish the
|
||||||
|
secure channel. `SCP02` is slightly older than `SCP03`. The main difference between the two is that `SCP02` uses 3DES
|
||||||
|
while `SCP03` is based on AES.
|
||||||
|
|
||||||
|
.. warning:: Secure channel protocols like `SCP02` and `SCP03` may manage an error counter to count failed login
|
||||||
|
attempts. This means attempting to establish a secure channel with a wrong keyset multiple times may lock
|
||||||
|
you out permanently. Double check the applied keyset before attempting to establish a secure channel.
|
||||||
|
|
||||||
|
.. warning:: The key values used in the following examples are random key values used for illustration purposes only.
|
||||||
|
Each UICC or eSIM profile is shipped with individual keys, which means that the keys used below will not
|
||||||
|
work with your UICC or eSIM profile. You must replace the key values with the values you have received
|
||||||
|
from your UICC vendor or eSIM profile provider.
|
||||||
|
|
||||||
|
|
||||||
|
Example: `SCP02`
|
||||||
|
----------------
|
||||||
|
|
||||||
|
In the following example, we assume that we want to establish a secure channel with the ISD of a `sysmoUSIM-SJA5` UICC.
|
||||||
|
Along with the card we have received the following keyset:
|
||||||
|
|
||||||
|
+---------+----------------------------------+
|
||||||
|
| Keyname | Keyvalue |
|
||||||
|
+=========+==================================+
|
||||||
|
| ENC/KIC | F09C43EE1A0391665CC9F05AF4E0BD10 |
|
||||||
|
+---------+----------------------------------+
|
||||||
|
| MAC/KID | 01981F4A20999F62AF99988007BAF6CA |
|
||||||
|
+---------+----------------------------------+
|
||||||
|
| DEK/KIK | 8F8AEE5CDCC5D361368BC45673D99195 |
|
||||||
|
+---------+----------------------------------+
|
||||||
|
|
||||||
|
This keyset is tied to the key version number KVN 112 and is configured as a DES keyset. We can use this keyset to
|
||||||
|
establish a secure channel using the SCP02 Secure Channel Protocol.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (00:MF/ADF.ISD)> establish_scp02 --key-enc F09C43EE1A0391665CC9F05AF4E0BD10 --key-mac 01981F4A20999F62AF99988007BAF6CA --key-dek 8F8AEE5CDCC5D361368BC45673D99195 --key-ver 112 --security-level 3
|
||||||
|
Successfully established a SCP02[03] secure channel
|
||||||
|
|
||||||
|
|
||||||
|
Example: `SCP03`
|
||||||
|
----------------
|
||||||
|
|
||||||
|
The establishment of a secure channel via SCP03 works just the same. In the following example we will establish a
|
||||||
|
secure channel to the `MNO-SD` of an eSIM profile. The SCP03 keyset we use is tied to KVN 48 and looks like this:
|
||||||
|
|
||||||
|
+---------+------------------------------------------------------------------+
|
||||||
|
| Keyname | Keyvalue |
|
||||||
|
+=========+==================================================================+
|
||||||
|
| ENC/KIC | 63af517c29ad6ac6fcadfe6ac8a3c8a041d8141c7eb845ef1cba6112a325e430 |
|
||||||
|
+---------+------------------------------------------------------------------+
|
||||||
|
| MAC/KID | 54b9ad6713ae922f54014ed762132e7b59bdcd2a2a6beba98fb9afe6b4df27e1 |
|
||||||
|
+---------+------------------------------------------------------------------+
|
||||||
|
| DEK/KIK | cbb933ba2389da93c86c112739cd96389139f16c6f80f7d16bf3593e407ca893 |
|
||||||
|
+---------+------------------------------------------------------------------+
|
||||||
|
|
||||||
|
We assume that the `MNO-SD` is already selected (see above). We may now establish the SCP03 secure channel:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (00:MF/ADF.ISD-R)> establish_scp03 --key-enc 63af517c29ad6ac6fcadfe6ac8a3c8a041d8141c7eb845ef1cba6112a325e430 --key-mac 54b9ad6713ae922f54014ed762132e7b59bdcd2a2a6beba98fb9afe6b4df27e1 --key-dek cbb933ba2389da93c86c112739cd96389139f16c6f80f7d16bf3593e407ca893 --key-ver 48 --security-level 3
|
||||||
|
Successfully established a SCP03[03] secure channel
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Understanding Keysets
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Before making any changes to keysets, it is recommended to check the status of the currently installed keysets. To do
|
||||||
|
so, we use the `get_data` command to retrieve the `key_information`. This command does not require the establishment of
|
||||||
|
a secure channel. We also cannot read back the key values themselves, but we get a summary of the installed keys
|
||||||
|
together with their KVN numbers, IDs, algorithm and key length values.
|
||||||
|
|
||||||
|
Example: `key_information` from a `sysmoISIM-SJA5`:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> get_data key_information
|
||||||
|
{
|
||||||
|
"key_information": [
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 112,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 112,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 3,
|
||||||
|
"key_version_number": 112,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 1,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 1,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 3,
|
||||||
|
"key_version_number": 1,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 2,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 2,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 3,
|
||||||
|
"key_version_number": 2,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 47,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 47,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 3,
|
||||||
|
"key_version_number": 47,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
Example: `key_information` from a `sysmoEUICC1-C2T`:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP03[03]:00:MF/ADF.ISD-R)> get_data key_information
|
||||||
|
{
|
||||||
|
"key_information": [
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 3,
|
||||||
|
"key_version_number": 50,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 32
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 50,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 32
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 50,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 32
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 64,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 64,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "tls_psk",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
The output from those two examples above may seem lengthy, but in order to move on and to provision your own keys
|
||||||
|
successfully, it is important to understand each aspect of it.
|
||||||
|
|
||||||
|
Key Version Number (KVN)
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
Each key is associated with a Key Version Number (KVN). Multiple keys that share the same KVN belong to the same
|
||||||
|
keyset. In the first example above we can see that four keysets with KVN numbers 112, 1, 2 and 47 are provisioned.
|
||||||
|
In the second example we see two keysets. One with KVN 50 and one with KVN 64.
|
||||||
|
|
||||||
|
The term "Key Version Number" is misleading as this number is not really a version number. It's actually a unique
|
||||||
|
identifier for a specific keyset that also defines with which Secure Channel Protocol a key can be used. This means
|
||||||
|
that the KVN is not just an arbitrary number. The following (incomplete) table gives a hint which KVN numbers may be
|
||||||
|
used with which Secure Channel Protocol.
|
||||||
|
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| KVN range | Secure Channel Protocol |
|
||||||
|
+===========+=======================================================+
|
||||||
|
| 1-15 | reserved for `SCP80` (OTA SMS) |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 17 | reserved for DAP specified in ETSI TS 102 226 |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 32-47 | reserved for `SCP02` |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 48-63 | reserved for `SCP03` |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 64-79 | reserved for `SCP81` (GSMA SGP.02, section 2.2.5.1) |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 112 | Token key (RSA public or DES, also used with `SCP02`) |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 113 | Receipt key (DES) |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 115 | DAP verification key (RSA public or DES) |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 116 | reserved for CASD |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 117 | 16-byte DES key for Ciphered Load File Data Block |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
| 255 | reserved for ISD with SCP02 without SCP80 support |
|
||||||
|
+-----------+-------------------------------------------------------+
|
||||||
|
|
||||||
|
With that we can now understand that in the first example, the first and the last keyset is intended to be used with
|
||||||
|
`SCP02` and that the second and the third keyset is intended to be used with `SCP80` (OTA SMS). In the second example we
|
||||||
|
can see that the first keyset is intended to be used with `SCP03`, whereas the second should be usable with `SCP81`.
|
||||||
|
|
||||||
|
|
||||||
|
Key Identifier
|
||||||
|
--------------
|
||||||
|
|
||||||
|
Each keyset consists of a number of keys, where each key has a different Key Identifier. The Key Identifier is usually
|
||||||
|
an incrementing number that starts counting at 1. The Key Identifier is used to distinguish the keys within the keyset.
|
||||||
|
The exact number of keys and their attributes depends on the secure channel protocol for which the keyset is intended
|
||||||
|
for. Each secure channel protocol may have its specific requirements on how many keys of which type, length or
|
||||||
|
Key Identifier have to be present.
|
||||||
|
|
||||||
|
However, almost all of the classic secure channel protocols (including `SCP02`, `SCP03` and `SCP81`) make use of the
|
||||||
|
following three-key scheme:
|
||||||
|
|
||||||
|
+----------------+---------+---------------------------------------+
|
||||||
|
| Key Identifier | Keyname | Purpose |
|
||||||
|
+================+=========+=======================================+
|
||||||
|
| 1 | ENC/KIC | encryption/decryption |
|
||||||
|
+----------------+---------+---------------------------------------+
|
||||||
|
| 2 | MAC/KID | cryptographic checksumming/signing |
|
||||||
|
+----------------+---------+---------------------------------------+
|
||||||
|
| 3 | DEK/KIK | encryption/decryption of key material |
|
||||||
|
+----------------+---------+---------------------------------------+
|
||||||
|
|
||||||
|
In this case, all three keys share the same length and are used with the same algorithm. The key length is often used
|
||||||
|
to implicitly select sub-types of an algorithm. (e.g. a 16 byte key of type `aes` is associated with `AES128`, where a 32
|
||||||
|
byte key would be associated with `AES256`).
|
||||||
|
|
||||||
|
The second example shows that different schemes are possible. The `SCP80` keyset from the second example uses a scheme
|
||||||
|
that works with two keys:
|
||||||
|
|
||||||
|
+----------------+---------+---------------------------------------+
|
||||||
|
| Key Identifier | Keyname | Purpose |
|
||||||
|
+================+=========+=======================================+
|
||||||
|
| 1 | TLS-PSK | pre-shared key used for TLS |
|
||||||
|
+----------------+---------+---------------------------------------+
|
||||||
|
| 2 | DEK/KIK | encryption/decryption of key material |
|
||||||
|
+----------------+---------+---------------------------------------+
|
||||||
|
|
||||||
|
It should also be noted that the order in which keysets and keys appear is an implementation detail of the UICC/eUICC
|
||||||
|
O/S. The order has no influence on how a keyset is interpreted. Only the Key Version Number (KVN) and the Key Identifier
|
||||||
|
matter.
|
||||||
|
|
||||||
|
|
||||||
|
Rotating a keyset
|
||||||
|
~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Rotating keys is one of the most basic tasks one might want to perform on an UICC/eUICC before using it productively. In
|
||||||
|
the following example we will illustrate how key rotation can be done. When rotating keys, only the key itself may
|
||||||
|
change. For example it is not possible to change the key length or the algorithm used (see also GlobalPlatform Card
|
||||||
|
Specification, section 11.8.2.3.3). Any key of the current Security Domain can be rotated, this also includes the key
|
||||||
|
that was used to establish the secure channel.
|
||||||
|
|
||||||
|
In the following example we assume that the Security Domain is selected and a secure channel is already established. We
|
||||||
|
intend to rotate the keyset with KVN 112. Since this keyset uses triple DES keys with a key length of 16, we must
|
||||||
|
replace it with a keyset with keys of the same nature.
|
||||||
|
|
||||||
|
The new keyset shall look like this:
|
||||||
|
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| Key Identifier | Keyname | Keyvalue |
|
||||||
|
+================+=========+==================================+
|
||||||
|
| 1 | ENC/KIC | 542C37A6043679F2F9F71116418B1CD5 |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| 2 | MAC/KID | 34F11BAC8E5390B57F4E601372339E3C |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| 3 | DEK/KIK | 5524F4BECFE96FB63FC29D6BAAC6058B |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
|
||||||
|
When passing the keys to the `put_key` commandline, we set the Key Identifier of the first key using the `--key-id`
|
||||||
|
parameter. This Key Identifier will be valid for the first key (KIC) we pass. For all consecutive keys, the Key
|
||||||
|
Identifier will be incremented automatically (see also GlobalPlatform Card Specification, section 11.8.2.2). To ensure
|
||||||
|
that the new KIC, KID and KIK keys get the correct Key Identifiers, it is crucial to maintain order when passing the
|
||||||
|
keys in the `--key-data` arguments. It is also important that each `--key-data` argument is preceded by a `--key-type`
|
||||||
|
argument that sets the algorithm correctly (`des` in this case).
|
||||||
|
|
||||||
|
Finally we have to target the keyset we want to rotate by its KVN. The `--old-key-version-nr` argument is set to 112
|
||||||
|
as this identifies the keyset we want to rotate. The `--key-version-nr` is also set to 112 as we do not want
|
||||||
|
KVN to be changed in this example. Changing the KVN while rotating a keyset is possible. In case the KVN has to change
|
||||||
|
for some reason, the new KVN must be selected carefully to keep the key usable with the associated Secure Channel
|
||||||
|
Protocol.
|
||||||
|
|
||||||
|
The commandline that matches the keyset we had laid out above looks like this:
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> put_key --key-id 1 --key-type des --key-data 542C37A6043679F2F9F71116418B1CD5 --key-type des --key-data 34F11BAC8E5390B57F4E601372339E3C --key-type des --key-data 5524F4BECFE96FB63FC29D6BAAC6058B --old-key-version-nr 112 --key-version-nr 112
|
||||||
|
|
||||||
|
After executing this put_key commandline, the keyset identified by KVN 112 is equipped with new keys. We can use
|
||||||
|
`get_data key_information` to inspect the currently installed keysets. The output should appear unchanged as
|
||||||
|
we only swapped out the keys. All other parameters, identifiers etc. should remain constant.
|
||||||
|
|
||||||
|
.. warning:: It is technically possible to rotate a keyset in a `non atomic` way using one `put_key` commandline for
|
||||||
|
each key. However, in case the targeted keyset is the one used to establish the current secure channel,
|
||||||
|
this method should not be used since, depending on the UICC/eUICC model, half-written key material may
|
||||||
|
interrupt the current secure channel.
|
||||||
|
|
||||||
|
|
||||||
|
Removing a keyset
|
||||||
|
~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
In some cases it is necessary to remove a keyset entirely. This can be done with the `delete_key` command. Here it is
|
||||||
|
important to understand that `delete_key` only removes one specific key from a specific keyset. This means that you
|
||||||
|
need to run a separate `delete_key` command for each key inside a keyset.
|
||||||
|
|
||||||
|
In the following example we assume that the Security Domain is selected and a secure channel is already established. We
|
||||||
|
intend to remove the keyset with KVN 112. This keyset consists of three keys.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> delete_key --key-ver 112 --key-id 1
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> delete_key --key-ver 112 --key-id 2
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> delete_key --key-ver 112 --key-id 3
|
||||||
|
|
||||||
|
To verify that the keyset has been deleted properly, we can use the `get_data key_information` command to inspect the
|
||||||
|
current status of the installed keysets. We should see that the key with KVN 112 is no longer present.
|
||||||
|
|
||||||
|
|
||||||
|
Adding a keyset
|
||||||
|
~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
In the following we will discuss how to add an entirely new keyset. The procedure is almost identical with the key
|
||||||
|
rotation procedure we have already discussed and it is assumed that all details about the key rotation are understood.
|
||||||
|
In this section we will go into more detail and illustrate how to provision new 3DES, `AES128` and `AES256` keysets.
|
||||||
|
|
||||||
|
It is important to keep in mind that storage space on a smartcard is a precious resource. In many cases the number of
|
||||||
|
keysets that a Security Domain can store is limited. In some situations you may be forced to sacrifice one of your
|
||||||
|
existing keysets in favor of a new keyset.
|
||||||
|
|
||||||
|
The main difference between key rotation and the adding of new keys is that we do not simply replace an existing key.
|
||||||
|
Instead an entirely new key is programmed into the Security Domain. Therefore the `put_key` commandline will have no
|
||||||
|
`--old-key-version-nr` parameter. From the commandline perspective, this is already the only visible difference from a
|
||||||
|
commandline that simply rotates a keyset. Since we are writing an entirely new keyset, we are free to choose the
|
||||||
|
algorithm and the key length within the parameter range permitted by the targeted secure channel protocol. Otherwise
|
||||||
|
the same rules apply.
|
||||||
|
|
||||||
|
For reference, it should be mentioned that it is also possible to add or rotate keyset using multiple `put_key`
|
||||||
|
commandlines. In this case one `put_key` commandline for each key is used. Each commandline will specify `--key-id` and
|
||||||
|
`--key-version-nr` and one `--key-type` and `--key-data` tuple. However, when rotating or adding a keyset step-by-step,
|
||||||
|
the whole process happens in a `non-atomic` way, which is less reliable. Therefore we will favor the `atomic method`.
|
||||||
|
|
||||||
|
In the following examples we assume that the Security Domain is selected and a secure channel is already established.
|
||||||
|
|
||||||
|
|
||||||
|
Example: `3DES` key for `SCP02`
|
||||||
|
-------------------------------
|
||||||
|
|
||||||
|
Let's assume we want to provision a new 3DES keyset that we can use for SCP02. The keyset shall look like this:
|
||||||
|
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| Key Identifier | Keyname | Keyvalue |
|
||||||
|
+================+=========+==================================+
|
||||||
|
| 1 | ENC/KIC | 542C37A6043679F2F9F71116418B1CD5 |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| 2 | MAC/KID | 34F11BAC8E5390B57F4E601372339E3C |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| 3 | DEK/KIK | 5524F4BECFE96FB63FC29D6BAAC6058B |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
|
||||||
|
The keyset shall be associated with KVN 46. We have made sure before that KVN 46 is still unused and that this
|
||||||
|
KVN number is actually suitable for SCP02 keys. As we are using 3DES, it is obvious that we have to pass 3 keys with 16
|
||||||
|
byte length.
|
||||||
|
|
||||||
|
To program the key, we may use the following commandline. As we can see, this commandline is almost the exact same as
|
||||||
|
the one from the key rotation example where we were rotating a 3DES key. The only difference is that we didn't specify
|
||||||
|
an old KVN number and that we have chosen a different KVN.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> put_key --key-id 1 --key-type des --key-data 542C37A6043679F2F9F71116418B1CD5 --key-type des --key-data 34F11BAC8E5390B57F4E601372339E3C --key-type des --key-data 5524F4BECFE96FB63FC29D6BAAC6058B --key-version-nr 46
|
||||||
|
|
||||||
|
In case of success, the keyset should appear in the `key_information` among the other keysets that are already present.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> get_data key_information
|
||||||
|
{
|
||||||
|
"key_information": [
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 46,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 46,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 3,
|
||||||
|
"key_version_number": 46,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "des",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
...
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Example: `AES128` key for `SCP80`
|
||||||
|
---------------------------------
|
||||||
|
|
||||||
|
In this example we intend to provision a new `AES128` keyset that we can use with SCP80 (OTA SMS). The keyset shall look
|
||||||
|
like this:
|
||||||
|
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| Key Identifier | Keyname | Keyvalue |
|
||||||
|
+================+=========+==================================+
|
||||||
|
| 1 | ENC/KIC | 542C37A6043679F2F9F71116418B1CD5 |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| 2 | MAC/KID | 34F11BAC8E5390B57F4E601372339E3C |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| 3 | DEK/KIK | 5524F4BECFE96FB63FC29D6BAAC6058B |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
|
||||||
|
In addition to that, we want to associate this key with KVN 3. We have inspected the currently installed keysets before
|
||||||
|
and made sure that KVN 3 is still unused. We are also aware that for SCP80 we may only use KVN values from 1 to 15.
|
||||||
|
|
||||||
|
For `AES128`, we specify the algorithm using the `--key-type aes` parameter. The selection between `AES128` and `AES256` is
|
||||||
|
done implicitly using the key length. Since we want to use `AES128` in this case, all three keys have a length of 16 byte.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> put_key --key-id 1 --key-type aes --key-data 542C37A6043679F2F9F71116418B1CD5 --key-type aes --key-data 34F11BAC8E5390B57F4E601372339E3C --key-type aes --key-data 5524F4BECFE96FB63FC29D6BAAC6058B --key-version-nr 3
|
||||||
|
|
||||||
|
In case of success, the keyset should appear in the `key_information` among the other keysets that are already present.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> get_data key_information
|
||||||
|
{
|
||||||
|
"key_information": [
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 3,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 3,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 3,
|
||||||
|
"key_version_number": 3,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
...
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Example: `AES256` key for `SCP03`
|
||||||
|
---------------------------------
|
||||||
|
|
||||||
|
Let's assume we want to provision a new `AES256` keyset that we can use for SCP03. The keyset shall look like this:
|
||||||
|
|
||||||
|
+----------------+---------+------------------------------------------------------------------+
|
||||||
|
| Key Identifier | Keyname | Keyvalue |
|
||||||
|
+================+=========+==================================================================+
|
||||||
|
| 1 | ENC/KIC | 542C37A6043679F2F9F71116418B1CD5542C37A6043679F2F9F71116418B1CD5 |
|
||||||
|
+----------------+---------+------------------------------------------------------------------+
|
||||||
|
| 2 | MAC/KID | 34F11BAC8E5390B57F4E601372339E3C34F11BAC8E5390B57F4E601372339E3C |
|
||||||
|
+----------------+---------+------------------------------------------------------------------+
|
||||||
|
| 3 | DEK/KIK | 5524F4BECFE96FB63FC29D6BAAC6058B5524F4BECFE96FB63FC29D6BAAC6058B |
|
||||||
|
+----------------+---------+------------------------------------------------------------------+
|
||||||
|
|
||||||
|
In addition to that, we assume that we want to associate this key with KVN 51. This KVN number falls in the range of
|
||||||
|
48 - 63 and is therefore suitable for a key that shall be usable with SCP03. We also made sure before that KVN 51 is
|
||||||
|
still unused.
|
||||||
|
|
||||||
|
With that we can go ahead and make up the following commandline:
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> put_key --key-id 1 --key-type aes --key-data 542C37A6043679F2F9F71116418B1CD5542C37A6043679F2F9F71116418B1CD5 --key-type aes --key-data 34F11BAC8E5390B57F4E601372339E3C34F11BAC8E5390B57F4E601372339E3C --key-type aes --key-data 5524F4BECFE96FB63FC29D6BAAC6058B5524F4BECFE96FB63FC29D6BAAC6058B --key-version-nr 51
|
||||||
|
|
||||||
|
In case of success, we should see the keyset in the `key_information`
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP02[03]:00:MF/ADF.ISD)> get_data key_information
|
||||||
|
{
|
||||||
|
"key_information": [
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 51,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 32
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 51,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 32
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 3,
|
||||||
|
"key_version_number": 51,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 32
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
...
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Example: `AES128` key for `SCP81`
|
||||||
|
---------------------------------
|
||||||
|
|
||||||
|
In this example we will show how to provision a new `AES128` keyset for `SCP81`. We will provision this keyset under
|
||||||
|
KVN 64. The keyset we intend to apply shall look like this:
|
||||||
|
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| Key Identifier | Keyname | Keyvalue |
|
||||||
|
+================+=========+==================================+
|
||||||
|
| 1 | TLS-PSK | 000102030405060708090a0b0c0d0e0f |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
| 2 | DEK/KIK | 000102030405060708090a0b0c0d0e0f |
|
||||||
|
+----------------+---------+----------------------------------+
|
||||||
|
|
||||||
|
With that we can put together the following command line:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
put_key --key-id 1 --key-type tls_psk --key-data 000102030405060708090a0b0c0d0e0f --key-type aes --key-data 000102030405060708090a0b0c0d0e0f --key-version-nr 64
|
||||||
|
|
||||||
|
In case of success, the keyset should appear in the `key_information` as follows:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
pySIM-shell (SCP03[03]:00:MF/ADF.ISD-R)> get_data key_information
|
||||||
|
{
|
||||||
|
"key_information": [
|
||||||
|
...,
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 2,
|
||||||
|
"key_version_number": 64,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "aes",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key_information_data": {
|
||||||
|
"key_identifier": 1,
|
||||||
|
"key_version_number": 64,
|
||||||
|
"key_types": [
|
||||||
|
{
|
||||||
|
"type": "tls_psk",
|
||||||
|
"length": 16
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
267
docs/pysim_fs_sphinx.py
Normal file
267
docs/pysim_fs_sphinx.py
Normal file
@@ -0,0 +1,267 @@
|
|||||||
|
"""
|
||||||
|
Sphinx extension: auto-generate docs/filesystem.rst from the pySim EF class hierarchy.
|
||||||
|
|
||||||
|
Hooked into Sphinx's ``builder-inited`` event so the file is always regenerated
|
||||||
|
from the live Python classes before Sphinx reads any source files.
|
||||||
|
|
||||||
|
The table of root objects to document is in SECTIONS near the top of this file.
|
||||||
|
EXCLUDED lists CardProfile/CardApplication subclasses intentionally omitted from
|
||||||
|
SECTIONS, with reasons. Both tables are read by tests/unittests/test_fs_coverage.py
|
||||||
|
to ensure every class with EF/DF content is accounted for.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import importlib
|
||||||
|
import inspect
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import textwrap
|
||||||
|
|
||||||
|
# Ensure pySim is importable when this module is loaded as a Sphinx extension
|
||||||
|
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
|
||||||
|
|
||||||
|
from pySim.filesystem import (CardApplication, CardDF, CardMF, CardEF, # noqa: E402
|
||||||
|
TransparentEF, TransRecEF, LinFixedEF, CyclicEF, BerTlvEF)
|
||||||
|
from pySim.profile import CardProfile # noqa: E402
|
||||||
|
|
||||||
|
|
||||||
|
# Generic EF base classes whose docstrings describe the *type* of file
|
||||||
|
# (Transparent, LinFixed, ...) rather than a specific file's content.
|
||||||
|
# Suppress those boilerplate texts in the per-EF entries; they are only
|
||||||
|
# useful once, at the top of the document or in a dedicated glossary.
|
||||||
|
_EF_BASE_TYPES = frozenset({
    TransparentEF,
    TransRecEF,
    LinFixedEF,
    CyclicEF,
    BerTlvEF,
})
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Sections: (heading, module, class-name)
|
||||||
|
# The class must be either a CardProfile (uses .files_in_mf) or a CardDF
|
||||||
|
# subclass (uses .children).
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
SECTIONS = [
    # (heading, module path, class name)
    ('MF / TS 102 221 (UICC)',               'pySim.ts_102_221',  'CardProfileUICC'),
    ('ADF.USIM / TS 31.102',                 'pySim.ts_31_102',   'ADF_USIM'),
    ('ADF.ISIM / TS 31.103',                 'pySim.ts_31_103',   'ADF_ISIM'),
    ('ADF.HPSIM / TS 31.104',                'pySim.ts_31_104',   'ADF_HPSIM'),
    ('DF.GSM + DF.TELECOM / TS 51.011 (SIM)', 'pySim.ts_51_011',  'CardProfileSIM'),
    ('CDMA / IS-820 (RUIM)',                 'pySim.cdma_ruim',   'CardProfileRUIM'),
    ('DF.EIRENE / GSM-R',                    'pySim.gsm_r',       'DF_EIRENE'),
    ('DF.SYSTEM / sysmocom SJA2+SJA5',       'pySim.sysmocom_sja2', 'DF_SYSTEM'),
]
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Excluded: {(module, class-name)}
|
||||||
|
# CardProfile and CardApplication subclasses that have EF/DF children but are
|
||||||
|
# intentionally absent from SECTIONS. Keeping this list explicit lets
|
||||||
|
# test_fs_coverage.py detect newly added classes that the developer forgot to
|
||||||
|
# add to either table.
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
EXCLUDED = {
    # The eUICC profiles inherit files_in_mf verbatim from CardProfileUICC;
    # their eUICC-specific content lives in the ISD-R / ISD-P applications,
    # not in the MF.
    ('pySim.euicc', 'CardProfileEuiccSGP02'),
    ('pySim.euicc', 'CardProfileEuiccSGP22'),
    ('pySim.euicc', 'CardProfileEuiccSGP32'),
    # The CardApplication* classes are thin wrappers around an ADF_* instance;
    # that ADF's contents are already documented via its own SECTIONS entry.
    ('pySim.ts_31_102', 'CardApplicationUSIM'),
    ('pySim.ts_31_102', 'CardApplicationUSIMnonIMSI'),
    ('pySim.ts_31_103', 'CardApplicationISIM'),
    ('pySim.ts_31_104', 'CardApplicationHPSIM'),
}
|
||||||
|
|
||||||
|
# RST underline characters ordered by nesting depth
|
||||||
|
_HEADING_CHARS = ['=', '=', '-', '~', '^', '"']
|
||||||
|
# Level 0 uses '=' with overline (page title).
|
||||||
|
# Level 1 uses '=' without overline (major sections).
|
||||||
|
# Levels 2+ use the remaining characters for DFs.
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# RST formatting helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _heading(title: str, level: int) -> str:
|
||||||
|
"""Return an RST heading string. Level 0 gets an overline."""
|
||||||
|
char = _HEADING_CHARS[level]
|
||||||
|
rule = char * len(title)
|
||||||
|
if level == 0:
|
||||||
|
return f'{rule}\n{title}\n{rule}\n\n'
|
||||||
|
return f'{title}\n{rule}\n\n'
|
||||||
|
|
||||||
|
|
||||||
|
def _json_default(obj):
|
||||||
|
"""Fallback serialiser: bytes -> hex, anything else -> repr."""
|
||||||
|
if isinstance(obj, (bytes, bytearray)):
|
||||||
|
return obj.hex()
|
||||||
|
return repr(obj)
|
||||||
|
|
||||||
|
|
||||||
|
def _examples_block(cls) -> str:
|
||||||
|
"""Return RST code-block examples (one per vector), or '' if none exist.
|
||||||
|
|
||||||
|
Each example is rendered as a ``json5`` code-block with the hex-encoded
|
||||||
|
binary as a ``// comment`` on the first line, followed by the decoded JSON.
|
||||||
|
``json5`` is used instead of ``json`` so that Pygments does not flag the
|
||||||
|
``//`` comment as a syntax error.
|
||||||
|
"""
|
||||||
|
vectors = []
|
||||||
|
for attr in ('_test_de_encode', '_test_decode'):
|
||||||
|
v = getattr(cls, attr, None)
|
||||||
|
if v:
|
||||||
|
vectors.extend(v)
|
||||||
|
if not vectors:
|
||||||
|
return ''
|
||||||
|
|
||||||
|
lines = ['**Examples**\n\n']
|
||||||
|
|
||||||
|
for t in vectors:
|
||||||
|
# 2-tuple: (encoded, decoded)
|
||||||
|
# 3-tuple: (encoded, record_nr, decoded) — LinFixedEF / CyclicEF
|
||||||
|
if len(t) >= 3:
|
||||||
|
encoded, record_nr, decoded = t[0], t[1], t[2]
|
||||||
|
comment = f'record {record_nr}: {encoded.lower()}'
|
||||||
|
else:
|
||||||
|
encoded, decoded = t[0], t[1]
|
||||||
|
comment = f'file: {encoded.lower()}'
|
||||||
|
|
||||||
|
json_str = json.dumps(decoded, default=_json_default, indent=2)
|
||||||
|
json_indented = textwrap.indent(json_str, ' ')
|
||||||
|
|
||||||
|
lines.append('.. code-block:: json5\n\n')
|
||||||
|
lines.append(f' // {comment}\n')
|
||||||
|
lines.append(json_indented + '\n')
|
||||||
|
lines.append('\n')
|
||||||
|
|
||||||
|
return ''.join(lines)
|
||||||
|
|
||||||
|
|
||||||
|
def _document_ef(ef: CardEF) -> str:
    """Render one EF as RST; a ``rubric`` heading keeps it out of the TOC."""
    cls = type(ef)

    title_bits = [ef.fully_qualified_path_str()]
    if ef.fid:
        title_bits.append('(%s)' % ef.fid.upper())
    if ef.desc:
        title_bits.append('\u2014 %s' % ef.desc)  # em-dash before description

    rst = '.. rubric:: %s\n\n' % ' '.join(title_bits)

    # A docstring is only worth emitting when it is specific to this class:
    # direct instances of a generic base type (TransparentEF, LinFixedEF, ...)
    # carry nothing but the "what kind of file is this" boilerplate, and
    # named subclasses without their own __doc__ have None in cls.__dict__.
    # Either way the generic text belongs at the document level, not repeated
    # for every single EF entry.
    if cls not in _EF_BASE_TYPES:
        doc = cls.__dict__.get('__doc__')
        if doc:
            rst += inspect.cleandoc(doc) + '\n\n'

    # Appending '' when there are no examples is a no-op, so no guard needed.
    rst += _examples_block(cls)
    return rst
|
||||||
|
|
||||||
|
|
||||||
|
def _document_df(df: CardDF, level: int) -> str:
    """Render a DF as an RST section, recursing into all of its children."""
    title_bits = [df.fully_qualified_path_str()]
    if df.fid:
        title_bits.append('(%s)' % df.fid.upper())
    if df.desc:
        title_bits.append('\u2014 %s' % df.desc)  # em-dash before description

    out = _heading(' '.join(title_bits), level)

    # Plain CardDF/CardMF docstrings are generic boilerplate; only emit
    # documentation written for a specific subclass.
    cls = type(df)
    if cls not in (CardDF, CardMF):
        doc = cls.__dict__.get('__doc__')
        if doc:
            out += inspect.cleandoc(doc) + '\n\n'

    # Child DFs become nested sections one level deeper; child EFs become
    # rubric entries.
    for child in df.children.values():
        if isinstance(child, CardDF):
            out += _document_df(child, level + 1)
        elif isinstance(child, CardEF):
            out += _document_ef(child)

    return out
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Top-level generator
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def generate_filesystem_rst() -> str:
    """Build the complete filesystem.rst document and return it as a string."""
    chunks = [
        '.. This file is auto-generated by docs/pysim_fs_sphinx.py — do not edit.\n\n',
        _heading('Card Filesystem Reference', 0),
        'This page documents all Elementary Files (EFs) and Dedicated Files (DFs) '
        'implemented in pySim, organised by their location in the card filesystem.\n\n',
    ]

    # Classes already documented in an earlier section are skipped, so that
    # DFs/EFs shared between profiles (e.g. DF.TELECOM / DF.GSM present in
    # both CardProfileSIM and CardProfileRUIM) are emitted only once.
    seen: set = set()

    for title, module_path, class_name in SECTIONS:
        root = getattr(importlib.import_module(module_path), class_name)()

        if isinstance(root, CardProfile):
            files = root.files_in_mf
        elif isinstance(root, CardApplication):
            files = list(root.adf.children.values())
        elif isinstance(root, CardDF):
            files = list(root.children.values())
        else:
            continue

        # Drop anything whose class an earlier section already covered.
        files = [f for f in files if type(f) not in seen]
        if not files:
            # The whole section would be empty; emit no heading for it.
            continue

        chunks.append(_heading(title, 1))
        for f in files:
            seen.add(type(f))
            if isinstance(f, CardDF):
                chunks.append(_document_df(f, level=2))
            elif isinstance(f, CardEF):
                chunks.append(_document_ef(f))

    return ''.join(chunks)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Sphinx integration
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _on_builder_inited(app):
    """Sphinx ``builder-inited`` hook: (re)generate filesystem.rst in srcdir.

    Runs before Sphinx reads any source files, so the generated page always
    reflects the current state of the Python class hierarchy.
    """
    output_path = os.path.join(app.srcdir, 'filesystem.rst')
    # The generated document contains non-ASCII text (em-dashes); pin the
    # encoding so the write does not depend on the platform's default locale
    # encoding (which may be ASCII and raise UnicodeEncodeError).
    with open(output_path, 'w', encoding='utf-8') as fh:
        fh.write(generate_filesystem_rst())
|
||||||
|
|
||||||
|
|
||||||
|
def setup(app):
    """Sphinx extension entry point: register the RST-generation hook."""
    app.connect('builder-inited', _on_builder_inited)
    return {'parallel_read_safe': True, 'version': '0.1'}
|
||||||
@@ -67,7 +67,7 @@ Inspecting applications
|
|||||||
|
|
||||||
To inspect the application PE contents of an existing profile package, sub-command `info` with parameter '--apps' can
|
To inspect the application PE contents of an existing profile package, sub-command `info` with parameter '--apps' can
|
||||||
be used. This command lists out all application and their parameters in detail. This allows an application developer
|
be used. This command lists out all application and their parameters in detail. This allows an application developer
|
||||||
to check if the applet insertaion was carried out as expected.
|
to check if the applet insertion was carried out as expected.
|
||||||
|
|
||||||
Example: Listing applications and their parameters
|
Example: Listing applications and their parameters
|
||||||
::
|
::
|
||||||
|
|||||||
@@ -68,7 +68,7 @@ Usage Examples
|
|||||||
|
|
||||||
suci-tutorial
|
suci-tutorial
|
||||||
cap-tutorial
|
cap-tutorial
|
||||||
|
put_key-tutorial
|
||||||
|
|
||||||
Advanced Topics
|
Advanced Topics
|
||||||
---------------
|
---------------
|
||||||
@@ -602,8 +602,8 @@ This allows for easy interactive modification of records.
|
|||||||
If this command fails before the editor is spawned, it means that the current record contents is not decodable,
|
If this command fails before the editor is spawned, it means that the current record contents is not decodable,
|
||||||
and you should use the :ref:`update_record_decoded` or :ref:`update_record` command.
|
and you should use the :ref:`update_record_decoded` or :ref:`update_record` command.
|
||||||
|
|
||||||
If this command fails after making your modificatiosn in the editor, it means that the new file contents is not
|
If this command fails after making your modifications in the editor, it means that the new file contents is not
|
||||||
encodable; please check your input and/or us the raw :ref:`update_record` comamdn.
|
encodable; please check your input and/or use the raw :ref:`update_record` command.
|
||||||
|
|
||||||
|
|
||||||
decode_hex
|
decode_hex
|
||||||
@@ -708,8 +708,8 @@ This allows for easy interactive modification of file contents.
|
|||||||
If this command fails before the editor is spawned, it means that the current file contents is not decodable,
|
If this command fails before the editor is spawned, it means that the current file contents is not decodable,
|
||||||
and you should use the :ref:`update_binary_decoded` or :ref:`update_binary` command.
|
and you should use the :ref:`update_binary_decoded` or :ref:`update_binary` command.
|
||||||
|
|
||||||
If this command fails after making your modificatiosn in the editor, it means that the new file contents is not
|
If this command fails after making your modifications in the editor, it means that the new file contents is not
|
||||||
encodable; please check your input and/or us the raw :ref:`update_binary` comamdn.
|
encodable; please check your input and/or use the raw :ref:`update_binary` command.
|
||||||
|
|
||||||
|
|
||||||
decode_hex
|
decode_hex
|
||||||
|
|||||||
@@ -44,6 +44,11 @@ from pySim.legacy.ts_51_011 import EF
|
|||||||
from pySim.card_handler import *
|
from pySim.card_handler import *
|
||||||
from pySim.utils import *
|
from pySim.utils import *
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
import logging
|
||||||
|
from pySim.log import PySimLogger
|
||||||
|
|
||||||
|
log = PySimLogger.get(Path(__file__).stem)
|
||||||
|
|
||||||
def parse_options():
|
def parse_options():
|
||||||
|
|
||||||
@@ -185,6 +190,7 @@ def parse_options():
|
|||||||
default=False, action="store_true")
|
default=False, action="store_true")
|
||||||
parser.add_argument("--card_handler", dest="card_handler_config", metavar="FILE",
|
parser.add_argument("--card_handler", dest="card_handler_config", metavar="FILE",
|
||||||
help="Use automatic card handling machine")
|
help="Use automatic card handling machine")
|
||||||
|
parser.add_argument("--verbose", help="Enable verbose logging", action='store_true', default=False)
|
||||||
|
|
||||||
options = parser.parse_args()
|
options = parser.parse_args()
|
||||||
|
|
||||||
@@ -770,6 +776,9 @@ if __name__ == '__main__':
|
|||||||
# Parse options
|
# Parse options
|
||||||
opts = parse_options()
|
opts = parse_options()
|
||||||
|
|
||||||
|
# Setup logger
|
||||||
|
PySimLogger.setup(print, {logging.WARN: "\033[33m"}, opts.verbose)
|
||||||
|
|
||||||
# Init card reader driver
|
# Init card reader driver
|
||||||
sl = init_reader(opts)
|
sl = init_reader(opts)
|
||||||
|
|
||||||
|
|||||||
@@ -25,7 +25,6 @@
|
|||||||
import hashlib
|
import hashlib
|
||||||
import argparse
|
import argparse
|
||||||
import os
|
import os
|
||||||
import random
|
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
@@ -46,11 +45,17 @@ from pySim.utils import dec_imsi, dec_iccid
|
|||||||
from pySim.legacy.utils import format_xplmn_w_act, dec_st, dec_msisdn
|
from pySim.legacy.utils import format_xplmn_w_act, dec_st, dec_msisdn
|
||||||
from pySim.ts_51_011 import EF_SMSP
|
from pySim.ts_51_011 import EF_SMSP
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
import logging
|
||||||
|
from pySim.log import PySimLogger
|
||||||
|
|
||||||
|
log = PySimLogger.get(Path(__file__).stem)
|
||||||
|
|
||||||
option_parser = argparse.ArgumentParser(description='Legacy tool for reading some parts of a SIM card',
|
option_parser = argparse.ArgumentParser(description='Legacy tool for reading some parts of a SIM card',
|
||||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||||
|
option_parser.add_argument("--verbose", help="Enable verbose logging", action='store_true', default=False)
|
||||||
argparse_add_reader_args(option_parser)
|
argparse_add_reader_args(option_parser)
|
||||||
|
|
||||||
|
|
||||||
def select_app(adf: str, card: SimCard):
|
def select_app(adf: str, card: SimCard):
|
||||||
"""Select application by its AID"""
|
"""Select application by its AID"""
|
||||||
sw = 0
|
sw = 0
|
||||||
@@ -75,6 +80,9 @@ if __name__ == '__main__':
|
|||||||
# Parse options
|
# Parse options
|
||||||
opts = option_parser.parse_args()
|
opts = option_parser.parse_args()
|
||||||
|
|
||||||
|
# Setup logger
|
||||||
|
PySimLogger.setup(print, {logging.WARN: "\033[33m"}, opts.verbose)
|
||||||
|
|
||||||
# Init card reader driver
|
# Init card reader driver
|
||||||
sl = init_reader(opts)
|
sl = init_reader(opts)
|
||||||
|
|
||||||
|
|||||||
@@ -107,12 +107,12 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
kwargs = {'include_ipy': True}
|
kwargs = {'include_ipy': True}
|
||||||
|
|
||||||
self.verbose = verbose
|
self.verbose = verbose
|
||||||
self._onchange_verbose('verbose', False, self.verbose);
|
PySimLogger.setup(self.poutput, {logging.WARN: YELLOW})
|
||||||
|
self._onchange_verbose('verbose', False, self.verbose)
|
||||||
|
|
||||||
# pylint: disable=unexpected-keyword-arg
|
# pylint: disable=unexpected-keyword-arg
|
||||||
super().__init__(persistent_history_file='~/.pysim_shell_history', allow_cli_args=False,
|
super().__init__(persistent_history_file='~/.pysim_shell_history', allow_cli_args=False,
|
||||||
auto_load_commands=False, startup_script=script, **kwargs)
|
auto_load_commands=False, startup_script=script, **kwargs)
|
||||||
PySimLogger.setup(self.poutput, {logging.WARN: YELLOW})
|
|
||||||
self.intro = style(self.BANNER, fg=RED)
|
self.intro = style(self.BANNER, fg=RED)
|
||||||
self.default_category = 'pySim-shell built-in commands'
|
self.default_category = 'pySim-shell built-in commands'
|
||||||
self.card = None
|
self.card = None
|
||||||
@@ -136,8 +136,7 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
self.add_settable(Settable2Compat('apdu_trace', bool, 'Trace and display APDUs exchanged with card', self,
|
self.add_settable(Settable2Compat('apdu_trace', bool, 'Trace and display APDUs exchanged with card', self,
|
||||||
onchange_cb=self._onchange_apdu_trace))
|
onchange_cb=self._onchange_apdu_trace))
|
||||||
self.add_settable(Settable2Compat('apdu_strict', bool,
|
self.add_settable(Settable2Compat('apdu_strict', bool,
|
||||||
'Enforce APDU responses according to ISO/IEC 7816-3, table 12', self,
|
'Strictly apply APDU format according to ISO/IEC 7816-3, table 12', self))
|
||||||
onchange_cb=self._onchange_apdu_strict))
|
|
||||||
self.add_settable(Settable2Compat('verbose', bool,
|
self.add_settable(Settable2Compat('verbose', bool,
|
||||||
'Enable/disable verbose logging', self,
|
'Enable/disable verbose logging', self,
|
||||||
onchange_cb=self._onchange_verbose))
|
onchange_cb=self._onchange_verbose))
|
||||||
@@ -218,13 +217,6 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
else:
|
else:
|
||||||
self.card._scc._tp.apdu_tracer = None
|
self.card._scc._tp.apdu_tracer = None
|
||||||
|
|
||||||
def _onchange_apdu_strict(self, param_name, old, new):
|
|
||||||
if self.card:
|
|
||||||
if new == True:
|
|
||||||
self.card._scc._tp.apdu_strict = True
|
|
||||||
else:
|
|
||||||
self.card._scc._tp.apdu_strict = False
|
|
||||||
|
|
||||||
def _onchange_verbose(self, param_name, old, new):
|
def _onchange_verbose(self, param_name, old, new):
|
||||||
PySimLogger.set_verbose(new)
|
PySimLogger.set_verbose(new)
|
||||||
if new == True:
|
if new == True:
|
||||||
@@ -281,7 +273,7 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
apdu_cmd_parser.add_argument('--expect-sw', help='expect a specified status word', type=str, default=None)
|
apdu_cmd_parser.add_argument('--expect-sw', help='expect a specified status word', type=str, default=None)
|
||||||
apdu_cmd_parser.add_argument('--expect-response-regex', help='match response against regex', type=str, default=None)
|
apdu_cmd_parser.add_argument('--expect-response-regex', help='match response against regex', type=str, default=None)
|
||||||
apdu_cmd_parser.add_argument('--raw', help='Bypass the logical channel (and secure channel)', action='store_true')
|
apdu_cmd_parser.add_argument('--raw', help='Bypass the logical channel (and secure channel)', action='store_true')
|
||||||
apdu_cmd_parser.add_argument('APDU', type=is_hexstr, help='APDU as hex string')
|
apdu_cmd_parser.add_argument('APDU', type=is_hexstr, help='APDU as hex string (see also: ISO/IEC 7816-3, section 12.1')
|
||||||
|
|
||||||
@cmd2.with_argparser(apdu_cmd_parser)
|
@cmd2.with_argparser(apdu_cmd_parser)
|
||||||
def do_apdu(self, opts):
|
def do_apdu(self, opts):
|
||||||
@@ -290,14 +282,23 @@ Online manual available at https://downloads.osmocom.org/docs/pysim/master/html/
|
|||||||
tracked. Depending on the raw APDU sent, pySim-shell may not continue to work as expected if you e.g. select
|
tracked. Depending on the raw APDU sent, pySim-shell may not continue to work as expected if you e.g. select
|
||||||
a different file."""
|
a different file."""
|
||||||
|
|
||||||
|
if not hasattr(self, 'apdu_strict_warning_displayed') and self.apdu_strict is False:
|
||||||
|
self.poutput("Warning: The default for the setable parameter `apdu_strict` will be changed from")
|
||||||
|
self.poutput(" `False` to `True` in future pySim-shell releases. In case you are using")
|
||||||
|
self.poutput(" the `apdu` command from a script that still mixes APDUs with TPDUs, consider")
|
||||||
|
self.poutput(" fixing or adding a `set apdu_strict false` line at the beginning.")
|
||||||
|
self.apdu_strict_warning_displayed = True;
|
||||||
|
|
||||||
# When sending raw APDUs we access the scc object through _scc member of the card object. It should also be
|
# When sending raw APDUs we access the scc object through _scc member of the card object. It should also be
|
||||||
# noted that the apdu command plays an exceptional role since it is the only card accessing command that
|
# noted that the apdu command plays an exceptional role since it is the only card accessing command that
|
||||||
# can be executed without the presence of a runtime state (self.rs) object. However, this also means that
|
# can be executed without the presence of a runtime state (self.rs) object. However, this also means that
|
||||||
# self.lchan is also not present (see method equip).
|
# self.lchan is also not present (see method equip).
|
||||||
|
self.card._scc._tp.apdu_strict = self.apdu_strict
|
||||||
if opts.raw or self.lchan is None:
|
if opts.raw or self.lchan is None:
|
||||||
data, sw = self.card._scc.send_apdu(opts.APDU, apply_lchan = False)
|
data, sw = self.card._scc.send_apdu(opts.APDU, apply_lchan = False)
|
||||||
else:
|
else:
|
||||||
data, sw = self.lchan.scc.send_apdu(opts.APDU, apply_lchan = False)
|
data, sw = self.lchan.scc.send_apdu(opts.APDU, apply_lchan = False)
|
||||||
|
self.card._scc._tp.apdu_strict = True
|
||||||
if data:
|
if data:
|
||||||
self.poutput("SW: %s, RESP: %s" % (sw, data))
|
self.poutput("SW: %s, RESP: %s" % (sw, data))
|
||||||
else:
|
else:
|
||||||
@@ -1175,13 +1176,7 @@ if __name__ == '__main__':
|
|||||||
opts = option_parser.parse_args()
|
opts = option_parser.parse_args()
|
||||||
|
|
||||||
# Ensure that we are able to print formatted warnings from the beginning.
|
# Ensure that we are able to print formatted warnings from the beginning.
|
||||||
PySimLogger.setup(print, {logging.WARN: YELLOW})
|
PySimLogger.setup(print, {logging.WARN: YELLOW}, opts.verbose)
|
||||||
if opts.verbose:
|
|
||||||
PySimLogger.set_verbose(True)
|
|
||||||
PySimLogger.set_level(logging.DEBUG)
|
|
||||||
else:
|
|
||||||
PySimLogger.set_verbose(False)
|
|
||||||
PySimLogger.set_level(logging.INFO)
|
|
||||||
|
|
||||||
# Register csv-file as card data provider, either from specified CSV
|
# Register csv-file as card data provider, either from specified CSV
|
||||||
# or from CSV file in home directory
|
# or from CSV file in home directory
|
||||||
|
|||||||
@@ -72,10 +72,10 @@ class ApduArDO(BER_TLV_IE, tag=0xd0):
|
|||||||
if do[0] == 0x01:
|
if do[0] == 0x01:
|
||||||
self.decoded = {'generic_access_rule': 'always'}
|
self.decoded = {'generic_access_rule': 'always'}
|
||||||
return self.decoded
|
return self.decoded
|
||||||
return ValueError('Invalid 1-byte generic APDU access rule')
|
raise ValueError('Invalid 1-byte generic APDU access rule')
|
||||||
else:
|
else:
|
||||||
if len(do) % 8:
|
if len(do) % 8:
|
||||||
return ValueError('Invalid non-modulo-8 length of APDU filter: %d' % len(do))
|
raise ValueError('Invalid non-modulo-8 length of APDU filter: %d' % len(do))
|
||||||
self.decoded = {'apdu_filter': []}
|
self.decoded = {'apdu_filter': []}
|
||||||
offset = 0
|
offset = 0
|
||||||
while offset < len(do):
|
while offset < len(do):
|
||||||
@@ -90,19 +90,19 @@ class ApduArDO(BER_TLV_IE, tag=0xd0):
|
|||||||
return b'\x00'
|
return b'\x00'
|
||||||
if self.decoded['generic_access_rule'] == 'always':
|
if self.decoded['generic_access_rule'] == 'always':
|
||||||
return b'\x01'
|
return b'\x01'
|
||||||
return ValueError('Invalid 1-byte generic APDU access rule')
|
raise ValueError('Invalid 1-byte generic APDU access rule')
|
||||||
else:
|
else:
|
||||||
if not 'apdu_filter' in self.decoded:
|
if not 'apdu_filter' in self.decoded:
|
||||||
return ValueError('Invalid APDU AR DO')
|
raise ValueError('Invalid APDU AR DO')
|
||||||
filters = self.decoded['apdu_filter']
|
filters = self.decoded['apdu_filter']
|
||||||
res = b''
|
res = b''
|
||||||
for f in filters:
|
for f in filters:
|
||||||
if not 'header' in f or not 'mask' in f:
|
if not 'header' in f or not 'mask' in f:
|
||||||
return ValueError('APDU filter must contain header and mask')
|
raise ValueError('APDU filter must contain header and mask')
|
||||||
header_b = h2b(f['header'])
|
header_b = h2b(f['header'])
|
||||||
mask_b = h2b(f['mask'])
|
mask_b = h2b(f['mask'])
|
||||||
if len(header_b) != 4 or len(mask_b) != 4:
|
if len(header_b) != 4 or len(mask_b) != 4:
|
||||||
return ValueError('APDU filter header and mask must each be 4 bytes')
|
raise ValueError('APDU filter header and mask must each be 4 bytes')
|
||||||
res += header_b + mask_b
|
res += header_b + mask_b
|
||||||
return res
|
return res
|
||||||
|
|
||||||
@@ -269,7 +269,7 @@ class ADF_ARAM(CardADF):
|
|||||||
cmd_do_enc = cmd_do.to_ie()
|
cmd_do_enc = cmd_do.to_ie()
|
||||||
cmd_do_len = len(cmd_do_enc)
|
cmd_do_len = len(cmd_do_enc)
|
||||||
if cmd_do_len > 255:
|
if cmd_do_len > 255:
|
||||||
return ValueError('DO > 255 bytes not supported yet')
|
raise ValueError('DO > 255 bytes not supported yet')
|
||||||
else:
|
else:
|
||||||
cmd_do_enc = b''
|
cmd_do_enc = b''
|
||||||
cmd_do_len = 0
|
cmd_do_len = 0
|
||||||
@@ -361,7 +361,7 @@ class ADF_ARAM(CardADF):
|
|||||||
ar_do_content += [{'apdu_ar_do': {'generic_access_rule': 'always'}}]
|
ar_do_content += [{'apdu_ar_do': {'generic_access_rule': 'always'}}]
|
||||||
elif opts.apdu_filter:
|
elif opts.apdu_filter:
|
||||||
if len(opts.apdu_filter) % 16:
|
if len(opts.apdu_filter) % 16:
|
||||||
return ValueError('Invalid non-modulo-16 length of APDU filter: %d' % len(do))
|
raise ValueError(f'Invalid non-modulo-16 length of APDU filter: {len(opts.apdu_filter)}')
|
||||||
offset = 0
|
offset = 0
|
||||||
apdu_filter = []
|
apdu_filter = []
|
||||||
while offset < len(opts.apdu_filter):
|
while offset < len(opts.apdu_filter):
|
||||||
|
|||||||
@@ -128,10 +128,10 @@ class EF_AD(TransparentEF):
|
|||||||
cell_test = 0x04
|
cell_test = 0x04
|
||||||
|
|
||||||
def __init__(self, fid='6f43', sfid=None, name='EF.AD',
|
def __init__(self, fid='6f43', sfid=None, name='EF.AD',
|
||||||
desc='Service Provider Name', size=(3, None), **kwargs):
|
desc='Administrative Data', size=(3, None), **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, **kwargs)
|
||||||
self._construct = Struct(
|
self._construct = Struct(
|
||||||
# Byte 1: Display Condition
|
# Byte 1: MS operation mode
|
||||||
'ms_operation_mode'/Enum(Byte, self.OP_MODE),
|
'ms_operation_mode'/Enum(Byte, self.OP_MODE),
|
||||||
# Bytes 2-3: Additional information
|
# Bytes 2-3: Additional information
|
||||||
'additional_info'/Bytes(2),
|
'additional_info'/Bytes(2),
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ import abc
|
|||||||
import requests
|
import requests
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
from typing import Optional
|
from typing import Optional, Tuple
|
||||||
import base64
|
import base64
|
||||||
from twisted.web.server import Request
|
from twisted.web.server import Request
|
||||||
|
|
||||||
@@ -180,7 +180,7 @@ class JsonHttpApiFunction(abc.ABC):
|
|||||||
# receives from the a requesting client. The same applies vice versa to class variables that have an "output_"
|
# receives from the a requesting client. The same applies vice versa to class variables that have an "output_"
|
||||||
# prefix.
|
# prefix.
|
||||||
|
|
||||||
# path of the API function (e.g. '/gsma/rsp2/es2plus/confirmOrder')
|
# path of the API function (e.g. '/gsma/rsp2/es2plus/confirmOrder', see also method rewrite_url).
|
||||||
path = None
|
path = None
|
||||||
|
|
||||||
# dictionary of input parameters. key is parameter name, value is ApiParam class
|
# dictionary of input parameters. key is parameter name, value is ApiParam class
|
||||||
@@ -336,6 +336,22 @@ class JsonHttpApiFunction(abc.ABC):
|
|||||||
output[p] = p_class.decode(v)
|
output[p] = p_class.decode(v)
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
def rewrite_url(self, data: dict, url: str) -> Tuple[dict, str]:
|
||||||
|
"""
|
||||||
|
Rewrite a static URL using information passed in the data dict. This method may be overloaded by a derived
|
||||||
|
class to allow fully dynamic URLs. The input parameters required for the URL rewriting may be passed using
|
||||||
|
data parameter. In case those parameters are additional parameters that are not intended to be passed to
|
||||||
|
the encode_client method later, they must be removed explcitly.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
data: (see JsonHttpApiClient and JsonHttpApiServer)
|
||||||
|
url: statically generated URL string (see comment in JsonHttpApiClient)
|
||||||
|
"""
|
||||||
|
|
||||||
|
# This implementation is a placeholder in which we do not perform any URL rewriting. We just pass through data
|
||||||
|
# and url unmodified.
|
||||||
|
return data, url
|
||||||
|
|
||||||
class JsonHttpApiClient():
|
class JsonHttpApiClient():
|
||||||
def __init__(self, api_func: JsonHttpApiFunction, url_prefix: str, func_req_id: Optional[str],
|
def __init__(self, api_func: JsonHttpApiFunction, url_prefix: str, func_req_id: Optional[str],
|
||||||
session: requests.Session):
|
session: requests.Session):
|
||||||
@@ -352,8 +368,16 @@ class JsonHttpApiClient():
|
|||||||
self.session = session
|
self.session = session
|
||||||
|
|
||||||
def call(self, data: dict, func_call_id: Optional[str] = None, timeout=10) -> Optional[dict]:
|
def call(self, data: dict, func_call_id: Optional[str] = None, timeout=10) -> Optional[dict]:
|
||||||
"""Make an API call to the HTTP API endpoint represented by this object. Input data is passed in `data` as
|
"""
|
||||||
json-serializable dict. Output data is returned as json-deserialized dict."""
|
Make an API call to the HTTP API endpoint represented by this object. Input data is passed in `data` as
|
||||||
|
json-serializable fields. `data` may also contain additional parameters required for URL rewriting (see
|
||||||
|
rewrite_url in class JsonHttpApiFunction). Output data is returned as json-deserialized dict.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
data: Input data required to perform the request.
|
||||||
|
func_call_id: Function Call Identifier, if present a header field is generated automatically.
|
||||||
|
timeout: Maximum amount of time to wait for the request to complete.
|
||||||
|
"""
|
||||||
|
|
||||||
# In case a function caller ID is supplied, use it together with the stored function requestor ID to generate
|
# In case a function caller ID is supplied, use it together with the stored function requestor ID to generate
|
||||||
# and prepend the header field according to SGP.22, section 6.5.1.1 and 6.5.1.3. (the presence of the header
|
# and prepend the header field according to SGP.22, section 6.5.1.1 and 6.5.1.3. (the presence of the header
|
||||||
@@ -362,6 +386,11 @@ class JsonHttpApiClient():
|
|||||||
data = {'header' : {'functionRequesterIdentifier': self.func_req_id,
|
data = {'header' : {'functionRequesterIdentifier': self.func_req_id,
|
||||||
'functionCallIdentifier': func_call_id}} | data
|
'functionCallIdentifier': func_call_id}} | data
|
||||||
|
|
||||||
|
# The URL used for the HTTP request (see below) normally consists of the initially given url_prefix
|
||||||
|
# concatenated with the path defined by the JsonHttpApiFunction definition. This static URL path may be
|
||||||
|
# rewritten by rewrite_url method defined in the JsonHttpApiFunction.
|
||||||
|
data, url = self.api_func.rewrite_url(data, self.url_prefix + self.api_func.path)
|
||||||
|
|
||||||
# Encode the message (the presence of mandatory fields is checked during encoding)
|
# Encode the message (the presence of mandatory fields is checked during encoding)
|
||||||
encoded = json.dumps(self.api_func.encode_client(data))
|
encoded = json.dumps(self.api_func.encode_client(data))
|
||||||
|
|
||||||
@@ -373,7 +402,6 @@ class JsonHttpApiClient():
|
|||||||
req_headers.update(self.api_func.extra_http_req_headers)
|
req_headers.update(self.api_func.extra_http_req_headers)
|
||||||
|
|
||||||
# Perform HTTP request
|
# Perform HTTP request
|
||||||
url = self.url_prefix + self.api_func.path
|
|
||||||
logger.debug("HTTP REQ %s - hdr: %s '%s'" % (url, req_headers, encoded))
|
logger.debug("HTTP REQ %s - hdr: %s '%s'" % (url, req_headers, encoded))
|
||||||
response = self.session.request(self.api_func.http_method, url, data=encoded, headers=req_headers, timeout=timeout)
|
response = self.session.request(self.api_func.http_method, url, data=encoded, headers=req_headers, timeout=timeout)
|
||||||
logger.debug("HTTP RSP-STS: [%u] hdr: %s" % (response.status_code, response.headers))
|
logger.debug("HTTP RSP-STS: [%u] hdr: %s" % (response.status_code, response.headers))
|
||||||
|
|||||||
@@ -151,6 +151,8 @@ class File:
|
|||||||
self.df_name = None
|
self.df_name = None
|
||||||
self.fill_pattern = None
|
self.fill_pattern = None
|
||||||
self.fill_pattern_repeat = False
|
self.fill_pattern_repeat = False
|
||||||
|
self.pstdo = None # pinStatusTemplateDO, mandatory for DF/ADF
|
||||||
|
self.lcsi = None # optional life cycle status indicator
|
||||||
# apply some defaults from profile
|
# apply some defaults from profile
|
||||||
if self.template:
|
if self.template:
|
||||||
self.from_template(self.template)
|
self.from_template(self.template)
|
||||||
@@ -278,6 +280,8 @@ class File:
|
|||||||
elif self.file_type in ['MF', 'DF', 'ADF']:
|
elif self.file_type in ['MF', 'DF', 'ADF']:
|
||||||
fdb_dec['file_type'] = 'df'
|
fdb_dec['file_type'] = 'df'
|
||||||
fdb_dec['structure'] = 'no_info_given'
|
fdb_dec['structure'] = 'no_info_given'
|
||||||
|
# pinStatusTemplateDO is mandatory for DF/ADF
|
||||||
|
fileDescriptor['pinStatusTemplateDO'] = self.pstdo
|
||||||
# build file descriptor based on above input data
|
# build file descriptor based on above input data
|
||||||
fd_dict = {}
|
fd_dict = {}
|
||||||
if len(fdb_dec):
|
if len(fdb_dec):
|
||||||
@@ -304,6 +308,8 @@ class File:
|
|||||||
# desired fill or repeat pattern in the "proprietaryEFInfo" element for the EF in Profiles
|
# desired fill or repeat pattern in the "proprietaryEFInfo" element for the EF in Profiles
|
||||||
# downloaded to a V2.2 or earlier eUICC.
|
# downloaded to a V2.2 or earlier eUICC.
|
||||||
fileDescriptor['proprietaryEFInfo'] = pefi
|
fileDescriptor['proprietaryEFInfo'] = pefi
|
||||||
|
if self.lcsi:
|
||||||
|
fileDescriptor['lcsi'] = self.lcsi
|
||||||
logger.debug("%s: to_fileDescriptor(%s)" % (self, fileDescriptor))
|
logger.debug("%s: to_fileDescriptor(%s)" % (self, fileDescriptor))
|
||||||
return fileDescriptor
|
return fileDescriptor
|
||||||
|
|
||||||
@@ -323,6 +329,8 @@ class File:
|
|||||||
if efFileSize:
|
if efFileSize:
|
||||||
self._file_size = self._decode_file_size(efFileSize)
|
self._file_size = self._decode_file_size(efFileSize)
|
||||||
|
|
||||||
|
self.pstdo = fileDescriptor.get('pinStatusTemplateDO', None)
|
||||||
|
self.lcsi = fileDescriptor.get('lcsi', None)
|
||||||
pefi = fileDescriptor.get('proprietaryEFInfo', {})
|
pefi = fileDescriptor.get('proprietaryEFInfo', {})
|
||||||
securityAttributesReferenced = fileDescriptor.get('securityAttributesReferenced', None)
|
securityAttributesReferenced = fileDescriptor.get('securityAttributesReferenced', None)
|
||||||
if securityAttributesReferenced:
|
if securityAttributesReferenced:
|
||||||
@@ -433,7 +441,7 @@ class File:
|
|||||||
elif k == 'fillFileContent':
|
elif k == 'fillFileContent':
|
||||||
stream.write(v)
|
stream.write(v)
|
||||||
else:
|
else:
|
||||||
return ValueError("Unknown key '%s' in tuple list" % k)
|
raise ValueError("Unknown key '%s' in tuple list" % k)
|
||||||
return stream.getvalue()
|
return stream.getvalue()
|
||||||
|
|
||||||
def file_content_to_tuples(self, optimize:bool = False) -> List[Tuple]:
|
def file_content_to_tuples(self, optimize:bool = False) -> List[Tuple]:
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ import tempfile
|
|||||||
import json
|
import json
|
||||||
import abc
|
import abc
|
||||||
import inspect
|
import inspect
|
||||||
|
import os
|
||||||
|
|
||||||
import cmd2
|
import cmd2
|
||||||
from cmd2 import CommandSet, with_default_category
|
from cmd2 import CommandSet, with_default_category
|
||||||
@@ -552,6 +553,85 @@ class CardADF(CardDF):
|
|||||||
return lchan.selected_file.application.export(as_json, lchan)
|
return lchan.selected_file.application.export(as_json, lchan)
|
||||||
|
|
||||||
|
|
||||||
|
class JsonEditor:
|
||||||
|
"""Context manager for editing a JSON-encoded EF value in an external editor.
|
||||||
|
|
||||||
|
Writes the current JSON value (plus encode/decode examples as //-comments)
|
||||||
|
to a temporary file, opens the user's editor, then reads the result back
|
||||||
|
(stripping comment lines) and returns it as the context variable::
|
||||||
|
|
||||||
|
with JsonEditor(self._cmd, orig_json, ef) as edited_json:
|
||||||
|
if edited_json != orig_json:
|
||||||
|
...write back...
|
||||||
|
"""
|
||||||
|
def __init__(self, cmd, orig_json, ef):
|
||||||
|
self._cmd = cmd
|
||||||
|
self._orig_json = orig_json
|
||||||
|
self._ef = ef
|
||||||
|
self._file = None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _strip_comments(text: str) -> str:
|
||||||
|
"""Strip //-comment lines from text before JSON parsing."""
|
||||||
|
# TODO: also strip inline comments?
|
||||||
|
return '\n'.join(line for line in text.splitlines() if not line.lstrip().startswith('//'))
|
||||||
|
|
||||||
|
def _append_examples_as_comments(self, text_file) -> None:
|
||||||
|
"""Append encode/decode test vectors as //-comment lines to an open file.
|
||||||
|
The examples are taken from _test_de_encode and _test_decode class
|
||||||
|
attributes (same source as the auto-generated filesystem documentation).
|
||||||
|
The comment block is intentionally ignored on read-back by _strip_comments."""
|
||||||
|
vectors = []
|
||||||
|
for attr in ('_test_de_encode', '_test_decode'):
|
||||||
|
v = getattr(type(self._ef), attr, None)
|
||||||
|
if v:
|
||||||
|
vectors.extend(v)
|
||||||
|
if not vectors:
|
||||||
|
return
|
||||||
|
ef = self._ef
|
||||||
|
parts = [ef.fully_qualified_path_str()]
|
||||||
|
if ef.fid:
|
||||||
|
parts.append(f'({ef.fid.upper()})')
|
||||||
|
if ef.desc:
|
||||||
|
parts.append(f'- {ef.desc}')
|
||||||
|
text_file.write(f'\n\n// {" ".join(parts)}\n')
|
||||||
|
text_file.write('// Examples (ignored on save):\n')
|
||||||
|
for t in vectors:
|
||||||
|
if len(t) >= 3:
|
||||||
|
encoded, record_nr, decoded = t[0], t[1], t[2]
|
||||||
|
text_file.write(f'// record {record_nr}: {encoded}\n')
|
||||||
|
else:
|
||||||
|
encoded, decoded = t[0], t[1]
|
||||||
|
text_file.write(f'// file: {encoded}\n')
|
||||||
|
for line in json.dumps(decoded, indent=4, cls=JsonEncoder).splitlines():
|
||||||
|
text_file.write(f'// {line}\n')
|
||||||
|
|
||||||
|
def __enter__(self) -> object:
|
||||||
|
"""Write JSON + examples to a temp file, run the editor, return parsed result.
|
||||||
|
|
||||||
|
On JSONDecodeError the user is offered the option to re-open the file
|
||||||
|
and fix the mistake interactively. The temp file is removed by __exit__()
|
||||||
|
on success, or when the user declines to retry."""
|
||||||
|
self._file = tempfile.NamedTemporaryFile(prefix='pysim_', suffix='.json',
|
||||||
|
mode='w', delete=False)
|
||||||
|
json.dump(self._orig_json, self._file, indent=4, cls=JsonEncoder)
|
||||||
|
self._append_examples_as_comments(self._file)
|
||||||
|
self._file.close()
|
||||||
|
while True:
|
||||||
|
self._cmd.run_editor(self._file.name)
|
||||||
|
try:
|
||||||
|
with open(self._file.name, 'r') as f:
|
||||||
|
return json.loads(self._strip_comments(f.read()))
|
||||||
|
except json.JSONDecodeError as e:
|
||||||
|
self._cmd.perror(f'Invalid JSON: {e}')
|
||||||
|
answer = self._cmd.read_input('Re-open file for editing? [y]es/[n]o: ')
|
||||||
|
if answer not in ('y', 'yes'):
|
||||||
|
return self._orig_json
|
||||||
|
|
||||||
|
def __exit__(self, *args):
|
||||||
|
os.unlink(self._file.name)
|
||||||
|
|
||||||
|
|
||||||
class CardEF(CardFile):
|
class CardEF(CardFile):
|
||||||
"""EF (Entry File) in the smart card filesystem"""
|
"""EF (Entry File) in the smart card filesystem"""
|
||||||
|
|
||||||
@@ -657,15 +737,8 @@ class TransparentEF(CardEF):
|
|||||||
def do_edit_binary_decoded(self, _opts):
|
def do_edit_binary_decoded(self, _opts):
|
||||||
"""Edit the JSON representation of the EF contents in an editor."""
|
"""Edit the JSON representation of the EF contents in an editor."""
|
||||||
(orig_json, _sw) = self._cmd.lchan.read_binary_dec()
|
(orig_json, _sw) = self._cmd.lchan.read_binary_dec()
|
||||||
with tempfile.TemporaryDirectory(prefix='pysim_') as dirname:
|
ef = self._cmd.lchan.selected_file
|
||||||
filename = '%s/file' % dirname
|
with JsonEditor(self._cmd, orig_json, ef) as edited_json:
|
||||||
# write existing data as JSON to file
|
|
||||||
with open(filename, 'w') as text_file:
|
|
||||||
json.dump(orig_json, text_file, indent=4, cls=JsonEncoder)
|
|
||||||
# run a text editor
|
|
||||||
self._cmd.run_editor(filename)
|
|
||||||
with open(filename, 'r') as text_file:
|
|
||||||
edited_json = json.load(text_file)
|
|
||||||
if edited_json == orig_json:
|
if edited_json == orig_json:
|
||||||
self._cmd.poutput("Data not modified, skipping write")
|
self._cmd.poutput("Data not modified, skipping write")
|
||||||
else:
|
else:
|
||||||
@@ -959,15 +1032,8 @@ class LinFixedEF(CardEF):
|
|||||||
def do_edit_record_decoded(self, opts):
|
def do_edit_record_decoded(self, opts):
|
||||||
"""Edit the JSON representation of one record in an editor."""
|
"""Edit the JSON representation of one record in an editor."""
|
||||||
(orig_json, _sw) = self._cmd.lchan.read_record_dec(opts.RECORD_NR)
|
(orig_json, _sw) = self._cmd.lchan.read_record_dec(opts.RECORD_NR)
|
||||||
with tempfile.TemporaryDirectory(prefix='pysim_') as dirname:
|
ef = self._cmd.lchan.selected_file
|
||||||
filename = '%s/file' % dirname
|
with JsonEditor(self._cmd, orig_json, ef) as edited_json:
|
||||||
# write existing data as JSON to file
|
|
||||||
with open(filename, 'w') as text_file:
|
|
||||||
json.dump(orig_json, text_file, indent=4, cls=JsonEncoder)
|
|
||||||
# run a text editor
|
|
||||||
self._cmd.run_editor(filename)
|
|
||||||
with open(filename, 'r') as text_file:
|
|
||||||
edited_json = json.load(text_file)
|
|
||||||
if edited_json == orig_json:
|
if edited_json == orig_json:
|
||||||
self._cmd.poutput("Data not modified, skipping write")
|
self._cmd.poutput("Data not modified, skipping write")
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -276,7 +276,7 @@ class ListOfSupportedOptions(BER_TLV_IE, tag=0x81):
|
|||||||
class SupportedKeysForScp03(BER_TLV_IE, tag=0x82):
|
class SupportedKeysForScp03(BER_TLV_IE, tag=0x82):
|
||||||
_construct = FlagsEnum(Byte, aes128=0x01, aes192=0x02, aes256=0x04)
|
_construct = FlagsEnum(Byte, aes128=0x01, aes192=0x02, aes256=0x04)
|
||||||
class SupportedTlsCipherSuitesForScp81(BER_TLV_IE, tag=0x83):
|
class SupportedTlsCipherSuitesForScp81(BER_TLV_IE, tag=0x83):
|
||||||
_consuruct = GreedyRange(Int16ub)
|
_construct = GreedyRange(Int16ub)
|
||||||
class ScpInformation(BER_TLV_IE, tag=0xa0, nested=[ScpType, ListOfSupportedOptions, SupportedKeysForScp03,
|
class ScpInformation(BER_TLV_IE, tag=0xa0, nested=[ScpType, ListOfSupportedOptions, SupportedKeysForScp03,
|
||||||
SupportedTlsCipherSuitesForScp81]):
|
SupportedTlsCipherSuitesForScp81]):
|
||||||
pass
|
pass
|
||||||
@@ -319,7 +319,7 @@ class CurrentSecurityLevel(BER_TLV_IE, tag=0xd3):
|
|||||||
# GlobalPlatform v2.3.1 Section 11.3.3.1.3
|
# GlobalPlatform v2.3.1 Section 11.3.3.1.3
|
||||||
class ApplicationAID(BER_TLV_IE, tag=0x4f):
|
class ApplicationAID(BER_TLV_IE, tag=0x4f):
|
||||||
_construct = GreedyBytes
|
_construct = GreedyBytes
|
||||||
class ApplicationTemplate(BER_TLV_IE, tag=0x61, ntested=[ApplicationAID]):
|
class ApplicationTemplate(BER_TLV_IE, tag=0x61, nested=[ApplicationAID]):
|
||||||
pass
|
pass
|
||||||
class ListOfApplications(BER_TLV_IE, tag=0x2f00, nested=[ApplicationTemplate]):
|
class ListOfApplications(BER_TLV_IE, tag=0x2f00, nested=[ApplicationTemplate]):
|
||||||
pass
|
pass
|
||||||
@@ -562,14 +562,14 @@ class ADF_SD(CardADF):
|
|||||||
|
|
||||||
@cmd2.with_argparser(store_data_parser)
|
@cmd2.with_argparser(store_data_parser)
|
||||||
def do_store_data(self, opts):
|
def do_store_data(self, opts):
|
||||||
"""Perform the GlobalPlatform GET DATA command in order to store some card-specific data.
|
"""Perform the GlobalPlatform STORE DATA command in order to store some card-specific data.
|
||||||
See GlobalPlatform CardSpecification v2.3Section 11.11 for details."""
|
See GlobalPlatform CardSpecification v2.3 Section 11.11 for details."""
|
||||||
response_permitted = opts.response == 'may_be_returned'
|
response_permitted = opts.response == 'may_be_returned'
|
||||||
self.store_data(h2b(opts.DATA), opts.data_structure, opts.encryption, response_permitted)
|
self.store_data(h2b(opts.DATA), opts.data_structure, opts.encryption, response_permitted)
|
||||||
|
|
||||||
def store_data(self, data: bytes, structure:str = 'none', encryption:str = 'none', response_permitted: bool = False) -> bytes:
|
def store_data(self, data: bytes, structure:str = 'none', encryption:str = 'none', response_permitted: bool = False) -> bytes:
|
||||||
"""Perform the GlobalPlatform GET DATA command in order to store some card-specific data.
|
"""Perform the GlobalPlatform STORE DATA command in order to store some card-specific data.
|
||||||
See GlobalPlatform CardSpecification v2.3Section 11.11 for details."""
|
See GlobalPlatform CardSpecification v2.3 Section 11.11 for details."""
|
||||||
max_cmd_len = self._cmd.lchan.scc.max_cmd_len
|
max_cmd_len = self._cmd.lchan.scc.max_cmd_len
|
||||||
# Table 11-89 of GP Card Specification v2.3
|
# Table 11-89 of GP Card Specification v2.3
|
||||||
remainder = data
|
remainder = data
|
||||||
@@ -585,7 +585,7 @@ class ADF_SD(CardADF):
|
|||||||
data, _sw = self._cmd.lchan.scc.send_apdu_checksw(hdr + b2h(chunk) + "00")
|
data, _sw = self._cmd.lchan.scc.send_apdu_checksw(hdr + b2h(chunk) + "00")
|
||||||
block_nr += 1
|
block_nr += 1
|
||||||
response += data
|
response += data
|
||||||
return data
|
return h2b(response)
|
||||||
|
|
||||||
put_key_parser = argparse.ArgumentParser()
|
put_key_parser = argparse.ArgumentParser()
|
||||||
put_key_parser.add_argument('--old-key-version-nr', type=auto_uint8, default=0, help='Old Key Version Number')
|
put_key_parser.add_argument('--old-key-version-nr', type=auto_uint8, default=0, help='Old Key Version Number')
|
||||||
@@ -859,20 +859,26 @@ class ADF_SD(CardADF):
|
|||||||
_rsp_hex, _sw = self._cmd.lchan.scc.send_apdu_checksw(cmd_hex)
|
_rsp_hex, _sw = self._cmd.lchan.scc.send_apdu_checksw(cmd_hex)
|
||||||
self._cmd.poutput("Loaded a total of %u bytes in %u blocks. Don't forget install_for_install (and make selectable) now!" % (total_size, block_nr))
|
self._cmd.poutput("Loaded a total of %u bytes in %u blocks. Don't forget install_for_install (and make selectable) now!" % (total_size, block_nr))
|
||||||
|
|
||||||
install_cap_parser = argparse.ArgumentParser()
|
install_cap_parser = argparse.ArgumentParser(usage='%(prog)s FILE [--install-parameters | --install-parameters-*]')
|
||||||
install_cap_parser.add_argument('cap_file', type=str, metavar='FILE',
|
install_cap_parser.add_argument('cap_file', type=str, metavar='FILE',
|
||||||
help='JAVA-CARD CAP file to install')
|
help='JAVA-CARD CAP file to install')
|
||||||
install_cap_parser_inst_prm_g = install_cap_parser.add_mutually_exclusive_group()
|
# Ideally, the parser should enforce that:
|
||||||
install_cap_parser_inst_prm_g.add_argument('--install-parameters', type=is_hexstr, default=None,
|
# * either the `--install-parameters` is given alone,
|
||||||
|
# * or distinct `--install-parameters-*` are optionally given instead.
|
||||||
|
# We tried to achieve this using mutually exclusive groups (add_mutually_exclusive_group).
|
||||||
|
# However, group nesting was never supported, often failed to work correctly, and was unintentionally
|
||||||
|
# exposed through inheritance. It has been deprecated since version 3.11, removed in version 3.14.
|
||||||
|
# Hence, we have to implement the enforcement manually.
|
||||||
|
install_cap_parser_inst_prm_grp = install_cap_parser.add_argument_group('Install Parameters')
|
||||||
|
install_cap_parser_inst_prm_grp.add_argument('--install-parameters', type=is_hexstr, default=None,
|
||||||
help='install Parameters (GPC_SPE_034, section 11.5.2.3.7, table 11-49)')
|
help='install Parameters (GPC_SPE_034, section 11.5.2.3.7, table 11-49)')
|
||||||
install_cap_parser_inst_prm_g_grp = install_cap_parser_inst_prm_g.add_argument_group()
|
install_cap_parser_inst_prm_grp.add_argument('--install-parameters-volatile-memory-quota',
|
||||||
install_cap_parser_inst_prm_g_grp.add_argument('--install-parameters-volatile-memory-quota',
|
|
||||||
type=int, default=None,
|
type=int, default=None,
|
||||||
help='volatile memory quota (GPC_SPE_034, section 11.5.2.3.7, table 11-49)')
|
help='volatile memory quota (GPC_SPE_034, section 11.5.2.3.7, table 11-49)')
|
||||||
install_cap_parser_inst_prm_g_grp.add_argument('--install-parameters-non-volatile-memory-quota',
|
install_cap_parser_inst_prm_grp.add_argument('--install-parameters-non-volatile-memory-quota',
|
||||||
type=int, default=None,
|
type=int, default=None,
|
||||||
help='non volatile memory quota (GPC_SPE_034, section 11.5.2.3.7, table 11-49)')
|
help='non volatile memory quota (GPC_SPE_034, section 11.5.2.3.7, table 11-49)')
|
||||||
install_cap_parser_inst_prm_g_grp.add_argument('--install-parameters-stk',
|
install_cap_parser_inst_prm_grp.add_argument('--install-parameters-stk',
|
||||||
type=is_hexstr, default=None,
|
type=is_hexstr, default=None,
|
||||||
help='Load Parameters (ETSI TS 102 226, section 8.2.1.3.2.1)')
|
help='Load Parameters (ETSI TS 102 226, section 8.2.1.3.2.1)')
|
||||||
|
|
||||||
@@ -888,9 +894,17 @@ class ADF_SD(CardADF):
|
|||||||
load_file_aid = cap.get_loadfile_aid()
|
load_file_aid = cap.get_loadfile_aid()
|
||||||
module_aid = cap.get_applet_aid()
|
module_aid = cap.get_applet_aid()
|
||||||
application_aid = module_aid
|
application_aid = module_aid
|
||||||
if opts.install_parameters:
|
if opts.install_parameters is not None:
|
||||||
|
# `--install-parameters` and `--install-parameters-*` are mutually exclusive
|
||||||
|
# make sure that none of `--install-parameters-*` is given; abort otherwise
|
||||||
|
if any(p is not None for p in [opts.install_parameters_non_volatile_memory_quota,
|
||||||
|
opts.install_parameters_volatile_memory_quota,
|
||||||
|
opts.install_parameters_stk]):
|
||||||
|
self.install_cap_parser.error('arguments --install-parameters-* are '
|
||||||
|
'not allowed with --install-parameters')
|
||||||
install_parameters = opts.install_parameters;
|
install_parameters = opts.install_parameters;
|
||||||
else:
|
else:
|
||||||
|
# `--install-parameters-*` are all optional
|
||||||
install_parameters = gen_install_parameters(opts.install_parameters_non_volatile_memory_quota,
|
install_parameters = gen_install_parameters(opts.install_parameters_non_volatile_memory_quota,
|
||||||
opts.install_parameters_volatile_memory_quota,
|
opts.install_parameters_volatile_memory_quota,
|
||||||
opts.install_parameters_stk)
|
opts.install_parameters_stk)
|
||||||
|
|||||||
@@ -17,6 +17,8 @@
|
|||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from osmocom.construct import *
|
from osmocom.construct import *
|
||||||
from osmocom.utils import *
|
from osmocom.utils import *
|
||||||
from osmocom.tlv import *
|
from osmocom.tlv import *
|
||||||
@@ -46,7 +48,9 @@ class InstallParams(TLV_IE_Collection, nested=[AppSpecificParams, SystemSpecific
|
|||||||
# GPD_SPE_013, table 11-49
|
# GPD_SPE_013, table 11-49
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def gen_install_parameters(non_volatile_memory_quota:int, volatile_memory_quota:int, stk_parameter:str):
|
def gen_install_parameters(non_volatile_memory_quota: Optional[int] = None,
|
||||||
|
volatile_memory_quota: Optional[int] = None,
|
||||||
|
stk_parameter: Optional[str] = None):
|
||||||
|
|
||||||
# GPD_SPE_013, table 11-49
|
# GPD_SPE_013, table 11-49
|
||||||
|
|
||||||
@@ -54,19 +58,17 @@ def gen_install_parameters(non_volatile_memory_quota:int, volatile_memory_quota:
|
|||||||
install_params = InstallParams()
|
install_params = InstallParams()
|
||||||
install_params_dict = [{'app_specific_params': None}]
|
install_params_dict = [{'app_specific_params': None}]
|
||||||
|
|
||||||
#Conditional
|
# Collect system specific parameters (optional)
|
||||||
if non_volatile_memory_quota and volatile_memory_quota and stk_parameter:
|
|
||||||
system_specific_params = []
|
system_specific_params = []
|
||||||
#Optional
|
if non_volatile_memory_quota is not None:
|
||||||
if non_volatile_memory_quota:
|
system_specific_params.append({'non_volatile_memory_quota': non_volatile_memory_quota})
|
||||||
system_specific_params += [{'non_volatile_memory_quota': non_volatile_memory_quota}]
|
if volatile_memory_quota is not None:
|
||||||
#Optional
|
system_specific_params.append({'volatile_memory_quota': volatile_memory_quota})
|
||||||
if volatile_memory_quota:
|
if stk_parameter is not None:
|
||||||
system_specific_params += [{'volatile_memory_quota': volatile_memory_quota}]
|
system_specific_params.append({'stk_parameter': stk_parameter})
|
||||||
#Optional
|
# Add system specific parameters to the install parameters, if any
|
||||||
if stk_parameter:
|
if system_specific_params:
|
||||||
system_specific_params += [{'stk_parameter': stk_parameter}]
|
install_params_dict.append({'system_specific_params': system_specific_params})
|
||||||
install_params_dict += [{'system_specific_params': system_specific_params}]
|
|
||||||
|
|
||||||
install_params.from_dict(install_params_dict)
|
install_params.from_dict(install_params_dict)
|
||||||
return b2h(install_params.to_bytes())
|
return b2h(install_params.to_bytes())
|
||||||
|
|||||||
@@ -438,7 +438,7 @@ class Scp03SessionKeys:
|
|||||||
"""Obtain the ICV value computed as described in 6.2.6.
|
"""Obtain the ICV value computed as described in 6.2.6.
|
||||||
This method has two modes:
|
This method has two modes:
|
||||||
* is_response=False for computing the ICV for C-ENC. Will pre-increment the counter.
|
* is_response=False for computing the ICV for C-ENC. Will pre-increment the counter.
|
||||||
* is_response=False for computing the ICV for R-DEC."""
|
* is_response=True for computing the ICV for R-DEC."""
|
||||||
if not is_response:
|
if not is_response:
|
||||||
self.block_nr += 1
|
self.block_nr += 1
|
||||||
# The binary value of this number SHALL be left padded with zeroes to form a full block.
|
# The binary value of this number SHALL be left padded with zeroes to form a full block.
|
||||||
|
|||||||
12
pySim/log.py
12
pySim/log.py
@@ -63,7 +63,7 @@ class PySimLogger:
|
|||||||
raise RuntimeError('static class, do not instantiate')
|
raise RuntimeError('static class, do not instantiate')
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def setup(print_callback = None, colors:dict = {}):
|
def setup(print_callback = None, colors:dict = {}, verbose_debug:bool = False):
|
||||||
"""
|
"""
|
||||||
Set a print callback function and color scheme. This function call is optional. In case this method is not
|
Set a print callback function and color scheme. This function call is optional. In case this method is not
|
||||||
called, default settings apply.
|
called, default settings apply.
|
||||||
@@ -72,10 +72,20 @@ class PySimLogger:
|
|||||||
have the following format: print_callback(message:str)
|
have the following format: print_callback(message:str)
|
||||||
colors : An optional dict through which certain log levels can be assigned a color.
|
colors : An optional dict through which certain log levels can be assigned a color.
|
||||||
(e.g. {logging.WARN: YELLOW})
|
(e.g. {logging.WARN: YELLOW})
|
||||||
|
verbose_debug: Enable verbose logging and set the loglevel DEBUG when set to true. Otherwise the
|
||||||
|
non-verbose logging is used and the loglevel is set to INFO. This setting can be changed
|
||||||
|
using the set_verbose and set_level methods at any time.
|
||||||
"""
|
"""
|
||||||
PySimLogger.print_callback = print_callback
|
PySimLogger.print_callback = print_callback
|
||||||
PySimLogger.colors = colors
|
PySimLogger.colors = colors
|
||||||
|
|
||||||
|
if (verbose_debug):
|
||||||
|
PySimLogger.set_verbose(True)
|
||||||
|
PySimLogger.set_level(logging.DEBUG)
|
||||||
|
else:
|
||||||
|
PySimLogger.set_verbose(False)
|
||||||
|
PySimLogger.set_level(logging.INFO)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def set_verbose(verbose:bool = False):
|
def set_verbose(verbose:bool = False):
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -221,12 +221,12 @@ class OtaAlgoCrypt(OtaAlgo, abc.ABC):
|
|||||||
for subc in cls.__subclasses__():
|
for subc in cls.__subclasses__():
|
||||||
if subc.enum_name == otak.algo_crypt:
|
if subc.enum_name == otak.algo_crypt:
|
||||||
return subc(otak)
|
return subc(otak)
|
||||||
raise ValueError('No implementation for crypt algorithm %s' % otak.algo_auth)
|
raise ValueError('No implementation for crypt algorithm %s' % otak.algo_crypt)
|
||||||
|
|
||||||
class OtaAlgoAuth(OtaAlgo, abc.ABC):
|
class OtaAlgoAuth(OtaAlgo, abc.ABC):
|
||||||
def __init__(self, otak: OtaKeyset):
|
def __init__(self, otak: OtaKeyset):
|
||||||
if self.enum_name != otak.algo_auth:
|
if self.enum_name != otak.algo_auth:
|
||||||
raise ValueError('Cannot use algorithm %s with key for %s' % (self.enum_name, otak.algo_crypt))
|
raise ValueError('Cannot use algorithm %s with key for %s' % (self.enum_name, otak.algo_auth))
|
||||||
super().__init__(otak)
|
super().__init__(otak)
|
||||||
|
|
||||||
def sign(self, data:bytes) -> bytes:
|
def sign(self, data:bytes) -> bytes:
|
||||||
|
|||||||
10
pySim/sms.py
10
pySim/sms.py
@@ -169,8 +169,14 @@ class SMS_TPDU(abc.ABC):
|
|||||||
|
|
||||||
class SMS_DELIVER(SMS_TPDU):
|
class SMS_DELIVER(SMS_TPDU):
|
||||||
"""Representation of a SMS-DELIVER T-PDU. This is the Network to MS/UE (downlink) direction."""
|
"""Representation of a SMS-DELIVER T-PDU. This is the Network to MS/UE (downlink) direction."""
|
||||||
flags_construct = BitStruct('tp_rp'/Flag, 'tp_udhi'/Flag, 'tp_rp'/Flag, 'tp_sri'/Flag,
|
flags_construct = BitStruct('tp_rp'/Flag,
|
||||||
Padding(1), 'tp_mms'/Flag, 'tp_mti'/BitsInteger(2))
|
'tp_udhi'/Flag,
|
||||||
|
'tp_sri'/Flag,
|
||||||
|
Padding(1),
|
||||||
|
'tp_lp'/Flag,
|
||||||
|
'tp_mms'/Flag,
|
||||||
|
'tp_mti'/BitsInteger(2))
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
kwargs['tp_mti'] = 0
|
kwargs['tp_mti'] = 0
|
||||||
super().__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ class LinkBase(abc.ABC):
|
|||||||
self.sw_interpreter = sw_interpreter
|
self.sw_interpreter = sw_interpreter
|
||||||
self.apdu_tracer = apdu_tracer
|
self.apdu_tracer = apdu_tracer
|
||||||
self.proactive_handler = proactive_handler
|
self.proactive_handler = proactive_handler
|
||||||
self.apdu_strict = False
|
self.apdu_strict = True
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
|
|||||||
@@ -26,6 +26,7 @@ from smartcard.CardRequest import CardRequest
|
|||||||
from smartcard.Exceptions import NoCardException, CardRequestTimeoutException, CardConnectionException
|
from smartcard.Exceptions import NoCardException, CardRequestTimeoutException, CardConnectionException
|
||||||
from smartcard.System import readers
|
from smartcard.System import readers
|
||||||
from smartcard.ExclusiveConnectCardConnection import ExclusiveConnectCardConnection
|
from smartcard.ExclusiveConnectCardConnection import ExclusiveConnectCardConnection
|
||||||
|
from smartcard.ATR import ATR
|
||||||
|
|
||||||
from osmocom.utils import h2i, i2h, Hexstr
|
from osmocom.utils import h2i, i2h, Hexstr
|
||||||
|
|
||||||
@@ -80,23 +81,25 @@ class PcscSimLink(LinkBaseTpdu):
|
|||||||
|
|
||||||
def connect(self):
|
def connect(self):
|
||||||
try:
|
try:
|
||||||
# To avoid leakage of resources, make sure the reader
|
# To avoid leakage of resources, make sure the reader is disconnected
|
||||||
# is disconnected
|
|
||||||
self.disconnect()
|
self.disconnect()
|
||||||
|
|
||||||
# Make card connection and select a suitable communication protocol
|
# Make card connection and select a suitable communication protocol
|
||||||
|
# (Even though pyscard provides an automatic protocol selection, we will make an independent decision
|
||||||
|
# based on the ATR. There are two reasons for that:
|
||||||
|
# 1) In case a card supports T=0 and T=1, we perfer to use T=0.
|
||||||
|
# 2) The automatic protocol selection may be unreliabe on some platforms
|
||||||
|
# see also: https://osmocom.org/issues/6952)
|
||||||
self._con.connect()
|
self._con.connect()
|
||||||
supported_protocols = self._con.getProtocol();
|
atr = ATR(self._con.getATR())
|
||||||
self.disconnect()
|
if atr.isT0Supported():
|
||||||
if (supported_protocols & CardConnection.T0_protocol):
|
self._con.setProtocol(CardConnection.T0_protocol)
|
||||||
protocol = CardConnection.T0_protocol
|
|
||||||
self.set_tpdu_format(0)
|
self.set_tpdu_format(0)
|
||||||
elif (supported_protocols & CardConnection.T1_protocol):
|
elif atr.isT1Supported():
|
||||||
protocol = CardConnection.T1_protocol
|
self._con.setProtocol(CardConnection.T1_protocol)
|
||||||
self.set_tpdu_format(1)
|
self.set_tpdu_format(1)
|
||||||
else:
|
else:
|
||||||
raise ReaderError('Unsupported card protocol')
|
raise ReaderError('Unsupported card protocol')
|
||||||
self._con.connect(protocol)
|
|
||||||
except CardConnectionException as exc:
|
except CardConnectionException as exc:
|
||||||
raise ProtocolError() from exc
|
raise ProtocolError() from exc
|
||||||
except NoCardException as exc:
|
except NoCardException as exc:
|
||||||
|
|||||||
@@ -1058,7 +1058,7 @@ class EF_OCSGL(LinFixedEF):
|
|||||||
# TS 31.102 Section 4.4.11.2 (Rel 15)
|
# TS 31.102 Section 4.4.11.2 (Rel 15)
|
||||||
class EF_5GS3GPPLOCI(TransparentEF):
|
class EF_5GS3GPPLOCI(TransparentEF):
|
||||||
def __init__(self, fid='4f01', sfid=0x01, name='EF.5GS3GPPLOCI', size=(20, 20),
|
def __init__(self, fid='4f01', sfid=0x01, name='EF.5GS3GPPLOCI', size=(20, 20),
|
||||||
desc='5S 3GP location information', **kwargs):
|
desc='5GS 3GPP location information', **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, **kwargs)
|
||||||
upd_status_constr = Enum(
|
upd_status_constr = Enum(
|
||||||
Byte, updated=0, not_updated=1, roaming_not_allowed=2)
|
Byte, updated=0, not_updated=1, roaming_not_allowed=2)
|
||||||
@@ -1326,7 +1326,7 @@ class EF_5G_PROSE_UIR(TransparentEF):
|
|||||||
pass
|
pass
|
||||||
class FiveGDdnmfCtfAddrForUploading(BER_TLV_IE, tag=0x97):
|
class FiveGDdnmfCtfAddrForUploading(BER_TLV_IE, tag=0x97):
|
||||||
pass
|
pass
|
||||||
class ProSeConfigDataForUeToNetworkRelayUE(BER_TLV_IE, tag=0xa0,
|
class ProSeConfigDataForUsageInfoReporting(BER_TLV_IE, tag=0xa0,
|
||||||
nested=[EF_5G_PROSE_DD.ValidityTimer,
|
nested=[EF_5G_PROSE_DD.ValidityTimer,
|
||||||
CollectionPeriod, ReportingWindow,
|
CollectionPeriod, ReportingWindow,
|
||||||
ReportingIndicators,
|
ReportingIndicators,
|
||||||
@@ -1336,7 +1336,7 @@ class EF_5G_PROSE_UIR(TransparentEF):
|
|||||||
desc='5G ProSe configuration data for usage information reporting', **kwargs):
|
desc='5G ProSe configuration data for usage information reporting', **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, **kwargs)
|
||||||
# contains TLV structure despite being TransparentEF, not BER-TLV ?!?
|
# contains TLV structure despite being TransparentEF, not BER-TLV ?!?
|
||||||
self._tlv = EF_5G_PROSE_UIR.ProSeConfigDataForUeToNetworkRelayUE
|
self._tlv = EF_5G_PROSE_UIR.ProSeConfigDataForUsageInfoReporting
|
||||||
|
|
||||||
# TS 31.102 Section 4.4.13.8 (Rel 18)
|
# TS 31.102 Section 4.4.13.8 (Rel 18)
|
||||||
class EF_5G_PROSE_U2URU(TransparentEF):
|
class EF_5G_PROSE_U2URU(TransparentEF):
|
||||||
|
|||||||
@@ -261,6 +261,26 @@ class EF_SMSP(LinFixedEF):
|
|||||||
"numbering_plan_id": "reserved_for_extension" },
|
"numbering_plan_id": "reserved_for_extension" },
|
||||||
"call_number": "" },
|
"call_number": "" },
|
||||||
"tp_pid": b"\x00", "tp_dcs": b"\x00", "tp_vp_minutes": 1440 } ),
|
"tp_pid": b"\x00", "tp_dcs": b"\x00", "tp_vp_minutes": 1440 } ),
|
||||||
|
( 'fffffffffffffffffffffffffffffffffffffffffffffffffdffffffffffffffffffffffff07919403214365f7ffffffffffffff',
|
||||||
|
{ "alpha_id": "", "parameter_indicators": { "tp_dest_addr": False, "tp_sc_addr": True,
|
||||||
|
"tp_pid": False, "tp_dcs": False, "tp_vp": False },
|
||||||
|
"tp_dest_addr": { "length": 255, "ton_npi": { "ext": True, "type_of_number": "reserved_for_extension",
|
||||||
|
"numbering_plan_id": "reserved_for_extension" },
|
||||||
|
"call_number": "" },
|
||||||
|
"tp_sc_addr": { "length": 7, "ton_npi": { "ext": True, "type_of_number": "international",
|
||||||
|
"numbering_plan_id": "isdn_e164" },
|
||||||
|
"call_number": "49301234567" },
|
||||||
|
"tp_pid": b"\xff", "tp_dcs": b"\xff", "tp_vp_minutes": 635040 } ),
|
||||||
|
( 'fffffffffffffffffffffffffffffffffffffffffffffffffc0b919403214365f7ffffffff07919403214365f7ffffffffffffff',
|
||||||
|
{ "alpha_id": "", "parameter_indicators": { "tp_dest_addr": True, "tp_sc_addr": True,
|
||||||
|
"tp_pid": False, "tp_dcs": False, "tp_vp": False },
|
||||||
|
"tp_dest_addr": { "length": 11, "ton_npi": { "ext": True, "type_of_number": "international",
|
||||||
|
"numbering_plan_id": "isdn_e164" },
|
||||||
|
"call_number": "49301234567" },
|
||||||
|
"tp_sc_addr": { "length": 7, "ton_npi": { "ext": True, "type_of_number": "international",
|
||||||
|
"numbering_plan_id": "isdn_e164" },
|
||||||
|
"call_number": "49301234567" },
|
||||||
|
"tp_pid": b"\xff", "tp_dcs": b"\xff", "tp_vp_minutes": 635040 } ),
|
||||||
]
|
]
|
||||||
_test_no_pad = True
|
_test_no_pad = True
|
||||||
class ValidityPeriodAdapter(Adapter):
|
class ValidityPeriodAdapter(Adapter):
|
||||||
@@ -289,16 +309,28 @@ class EF_SMSP(LinFixedEF):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def sc_addr_len(ctx):
|
def sc_addr_len(ctx):
|
||||||
"""Compute the length field for an address field (like TP-DestAddr or TP-ScAddr)."""
|
"""Compute the length field for an address field (see also: 3GPP TS 24.011, section 8.2.5.2)."""
|
||||||
if not hasattr(ctx, 'call_number') or len(ctx.call_number) == 0:
|
if not hasattr(ctx, 'call_number') or len(ctx.call_number) == 0:
|
||||||
return 0xff
|
return 0xff
|
||||||
else:
|
else:
|
||||||
|
# octets required for the call_number + one octet for ton_npi
|
||||||
return bytes_for_nibbles(len(ctx.call_number)) + 1
|
return bytes_for_nibbles(len(ctx.call_number)) + 1
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def dest_addr_len(ctx):
|
||||||
|
"""Compute the length field for an address field (see also: 3GPP TS 23.040, section 9.1.2.5)."""
|
||||||
|
if not hasattr(ctx, 'call_number') or len(ctx.call_number) == 0:
|
||||||
|
return 0xff
|
||||||
|
else:
|
||||||
|
# number of call_number digits
|
||||||
|
return len(ctx.call_number)
|
||||||
|
|
||||||
def __init__(self, fid='6f42', sfid=None, name='EF.SMSP', desc='Short message service parameters', **kwargs):
|
def __init__(self, fid='6f42', sfid=None, name='EF.SMSP', desc='Short message service parameters', **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, rec_len=(28, None), **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, rec_len=(28, None), **kwargs)
|
||||||
ScAddr = Struct('length'/Rebuild(Int8ub, lambda ctx: EF_SMSP.sc_addr_len(ctx)),
|
ScAddr = Struct('length'/Rebuild(Int8ub, lambda ctx: EF_SMSP.sc_addr_len(ctx)),
|
||||||
'ton_npi'/TonNpi, 'call_number'/PaddedBcdAdapter(Rpad(Bytes(10))))
|
'ton_npi'/TonNpi, 'call_number'/PaddedBcdAdapter(Rpad(Bytes(10))))
|
||||||
|
DestAddr = Struct('length'/Rebuild(Int8ub, lambda ctx: EF_SMSP.dest_addr_len(ctx)),
|
||||||
|
'ton_npi'/TonNpi, 'call_number'/PaddedBcdAdapter(Rpad(Bytes(10))))
|
||||||
self._construct = Struct('alpha_id'/COptional(GsmOrUcs2Adapter(Rpad(Bytes(this._.total_len-28)))),
|
self._construct = Struct('alpha_id'/COptional(GsmOrUcs2Adapter(Rpad(Bytes(this._.total_len-28)))),
|
||||||
'parameter_indicators'/InvertAdapter(BitStruct(
|
'parameter_indicators'/InvertAdapter(BitStruct(
|
||||||
Const(7, BitsInteger(3)),
|
Const(7, BitsInteger(3)),
|
||||||
@@ -307,9 +339,8 @@ class EF_SMSP(LinFixedEF):
|
|||||||
'tp_pid'/Flag,
|
'tp_pid'/Flag,
|
||||||
'tp_sc_addr'/Flag,
|
'tp_sc_addr'/Flag,
|
||||||
'tp_dest_addr'/Flag)),
|
'tp_dest_addr'/Flag)),
|
||||||
'tp_dest_addr'/ScAddr,
|
'tp_dest_addr'/DestAddr,
|
||||||
'tp_sc_addr'/ScAddr,
|
'tp_sc_addr'/ScAddr,
|
||||||
|
|
||||||
'tp_pid'/Bytes(1),
|
'tp_pid'/Bytes(1),
|
||||||
'tp_dcs'/Bytes(1),
|
'tp_dcs'/Bytes(1),
|
||||||
'tp_vp_minutes'/EF_SMSP.ValidityPeriodAdapter(Byte))
|
'tp_vp_minutes'/EF_SMSP.ValidityPeriodAdapter(Byte))
|
||||||
@@ -389,7 +420,7 @@ class DF_TELECOM(CardDF):
|
|||||||
# TS 51.011 Section 10.3.1
|
# TS 51.011 Section 10.3.1
|
||||||
class EF_LP(TransRecEF):
|
class EF_LP(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( "24", "24"),
|
( "24", ["24"] ),
|
||||||
]
|
]
|
||||||
def __init__(self, fid='6f05', sfid=None, name='EF.LP', size=(1, None), rec_len=1,
|
def __init__(self, fid='6f05', sfid=None, name='EF.LP', size=(1, None), rec_len=1,
|
||||||
desc='Language Preference'):
|
desc='Language Preference'):
|
||||||
@@ -446,8 +477,8 @@ class EF_IMSI(TransparentEF):
|
|||||||
# TS 51.011 Section 10.3.4
|
# TS 51.011 Section 10.3.4
|
||||||
class EF_PLMNsel(TransRecEF):
|
class EF_PLMNsel(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( "22F860", { "mcc": "228", "mnc": "06" } ),
|
( "22F860", [{ "mcc": "228", "mnc": "06" }] ),
|
||||||
( "330420", { "mcc": "334", "mnc": "020" } ),
|
( "330420", [{ "mcc": "334", "mnc": "020" }] ),
|
||||||
]
|
]
|
||||||
def __init__(self, fid='6f30', sfid=None, name='EF.PLMNsel', desc='PLMN selector',
|
def __init__(self, fid='6f30', sfid=None, name='EF.PLMNsel', desc='PLMN selector',
|
||||||
size=(24, None), rec_len=3, **kwargs):
|
size=(24, None), rec_len=3, **kwargs):
|
||||||
@@ -661,7 +692,7 @@ class EF_AD(TransparentEF):
|
|||||||
# TS 51.011 Section 10.3.20 / 10.3.22
|
# TS 51.011 Section 10.3.20 / 10.3.22
|
||||||
class EF_VGCS(TransRecEF):
|
class EF_VGCS(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( "92f9ffff", "299" ),
|
( "92f9ffff", ["299"] ),
|
||||||
]
|
]
|
||||||
def __init__(self, fid='6fb1', sfid=None, name='EF.VGCS', size=(4, 200), rec_len=4,
|
def __init__(self, fid='6fb1', sfid=None, name='EF.VGCS', size=(4, 200), rec_len=4,
|
||||||
desc='Voice Group Call Service', **kwargs):
|
desc='Voice Group Call Service', **kwargs):
|
||||||
@@ -797,9 +828,9 @@ class EF_LOCIGPRS(TransparentEF):
|
|||||||
# TS 51.011 Section 10.3.35..37
|
# TS 51.011 Section 10.3.35..37
|
||||||
class EF_xPLMNwAcT(TransRecEF):
|
class EF_xPLMNwAcT(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( '62F2104000', { "mcc": "262", "mnc": "01", "act": [ "E-UTRAN NB-S1", "E-UTRAN WB-S1" ] } ),
|
( '62F2104000', [{ "mcc": "262", "mnc": "01", "act": [ "E-UTRAN NB-S1", "E-UTRAN WB-S1" ] }] ),
|
||||||
( '62F2108000', { "mcc": "262", "mnc": "01", "act": [ "UTRAN" ] } ),
|
( '62F2108000', [{ "mcc": "262", "mnc": "01", "act": [ "UTRAN" ] }] ),
|
||||||
( '62F220488C', { "mcc": "262", "mnc": "02", "act": ['E-UTRAN NB-S1', 'E-UTRAN WB-S1', 'EC-GSM-IoT', 'GSM', 'NG-RAN'] } ),
|
( '62F220488C', [{ "mcc": "262", "mnc": "02", "act": ['E-UTRAN NB-S1', 'E-UTRAN WB-S1', 'EC-GSM-IoT', 'GSM', 'NG-RAN'] }] ),
|
||||||
]
|
]
|
||||||
def __init__(self, fid='1234', sfid=None, name=None, desc=None, size=(40, None), rec_len=5, **kwargs):
|
def __init__(self, fid='1234', sfid=None, name=None, desc=None, size=(40, None), rec_len=5, **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, rec_len=rec_len, **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, size=size, rec_len=rec_len, **kwargs)
|
||||||
@@ -1034,9 +1065,10 @@ class EF_ICCID(TransparentEF):
|
|||||||
# TS 102 221 Section 13.3 / TS 31.101 Section 13 / TS 51.011 Section 10.1.2
|
# TS 102 221 Section 13.3 / TS 31.101 Section 13 / TS 51.011 Section 10.1.2
|
||||||
class EF_PL(TransRecEF):
|
class EF_PL(TransRecEF):
|
||||||
_test_de_encode = [
|
_test_de_encode = [
|
||||||
( '6465', "de" ),
|
( '6465', ["de"] ),
|
||||||
( '656e', "en" ),
|
( '656e', ["en"] ),
|
||||||
( 'ffff', None ),
|
( 'ffff', [None] ),
|
||||||
|
( '656e64657275ffffffff', ["en", "de", "ru", None, None] ),
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self, fid='2f05', sfid=0x05, name='EF.PL', desc='Preferred Languages'):
|
def __init__(self, fid='2f05', sfid=0x05, name='EF.PL', desc='Preferred Languages'):
|
||||||
@@ -1117,8 +1149,8 @@ class DF_GSM(CardDF):
|
|||||||
EF_MBI(),
|
EF_MBI(),
|
||||||
EF_MWIS(),
|
EF_MWIS(),
|
||||||
EF_CFIS(),
|
EF_CFIS(),
|
||||||
EF_EXT('6fc8', None, 'EF.EXT6', desc='Externsion6 (MBDN)'),
|
EF_EXT('6fc8', None, 'EF.EXT6', desc='Extension6 (MBDN)'),
|
||||||
EF_EXT('6fcc', None, 'EF.EXT7', desc='Externsion7 (CFIS)'),
|
EF_EXT('6fcc', None, 'EF.EXT7', desc='Extension7 (CFIS)'),
|
||||||
EF_SPDI(),
|
EF_SPDI(),
|
||||||
EF_MMSN(),
|
EF_MMSN(),
|
||||||
EF_EXT('6fcf', None, 'EF.EXT8', desc='Extension8 (MMSN)'),
|
EF_EXT('6fcf', None, 'EF.EXT8', desc='Extension8 (MMSN)'),
|
||||||
|
|||||||
@@ -139,7 +139,6 @@ def enc_plmn(mcc: Hexstr, mnc: Hexstr) -> Hexstr:
|
|||||||
|
|
||||||
def dec_plmn(threehexbytes: Hexstr) -> dict:
|
def dec_plmn(threehexbytes: Hexstr) -> dict:
|
||||||
res = {'mcc': "0", 'mnc': "0"}
|
res = {'mcc': "0", 'mnc': "0"}
|
||||||
dec_mcc_from_plmn_str(threehexbytes)
|
|
||||||
res['mcc'] = dec_mcc_from_plmn_str(threehexbytes)
|
res['mcc'] = dec_mcc_from_plmn_str(threehexbytes)
|
||||||
res['mnc'] = dec_mnc_from_plmn_str(threehexbytes)
|
res['mnc'] = dec_mnc_from_plmn_str(threehexbytes)
|
||||||
return res
|
return res
|
||||||
@@ -911,7 +910,8 @@ class DataObjectCollection:
|
|||||||
def encode(self, decoded) -> bytes:
|
def encode(self, decoded) -> bytes:
|
||||||
res = bytearray()
|
res = bytearray()
|
||||||
for i in decoded:
|
for i in decoded:
|
||||||
obj = self.members_by_name(i[0])
|
name = i[0]
|
||||||
|
obj = self.members_by_name[name]
|
||||||
res.append(obj.to_tlv())
|
res.append(obj.to_tlv())
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
Using PC/SC reader interface
|
INFO: Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: Fairwaves-SIM
|
Autodetected card type: Fairwaves-SIM
|
||||||
ICCID: 8988219000000117833
|
ICCID: 8988219000000117833
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
Using PC/SC reader interface
|
INFO: Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: Wavemobile-SIM
|
Autodetected card type: Wavemobile-SIM
|
||||||
ICCID: 89445310150011013678
|
ICCID: 89445310150011013678
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
Using PC/SC reader interface
|
INFO: Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: fakemagicsim
|
Autodetected card type: fakemagicsim
|
||||||
ICCID: 1122334455667788990
|
ICCID: 1122334455667788990
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
Using PC/SC reader interface
|
INFO: Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: sysmoISIM-SJA2
|
Autodetected card type: sysmoISIM-SJA2
|
||||||
ICCID: 8988211000000467343
|
ICCID: 8988211000000467343
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
Using PC/SC reader interface
|
INFO: Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: sysmoISIM-SJA5
|
Autodetected card type: sysmoISIM-SJA5
|
||||||
ICCID: 8949440000001155314
|
ICCID: 8949440000001155314
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
Using PC/SC reader interface
|
INFO: Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: sysmoUSIM-SJS1
|
Autodetected card type: sysmoUSIM-SJS1
|
||||||
ICCID: 8988211320300000028
|
ICCID: 8988211320300000028
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
Using PC/SC reader interface
|
INFO: Using PC/SC reader interface
|
||||||
Reading ...
|
Reading ...
|
||||||
Autodetected card type: sysmosim-gr1
|
Autodetected card type: sysmosim-gr1
|
||||||
ICCID: 2222334455667788990
|
ICCID: 2222334455667788990
|
||||||
|
|||||||
@@ -176,12 +176,11 @@ class TransRecEF_Test(unittest.TestCase):
|
|||||||
|
|
||||||
|
|
||||||
def test_de_encode_record(self):
|
def test_de_encode_record(self):
|
||||||
"""Test the decoder and encoder for a transparent record-oriented EF. Performs first a decoder
|
"""Test the decoder and encoder for a transparent record-oriented EF at the whole-file
|
||||||
test, and then re-encodes the decoded data, comparing the re-encoded data with the
|
level. Performs first a decode test, then re-encodes and compares with the input.
|
||||||
initial input data.
|
|
||||||
|
|
||||||
Requires the given TransRecEF subclass to have a '_test_de_encode' attribute,
|
Requires the given TransRecEF subclass to have a '_test_de_encode' attribute,
|
||||||
containing a list of tuples. Each tuple has to be a 2-tuple (hexstring, decoded_dict).
|
containing a list of 2-tuples (hexstring, decoded_list).
|
||||||
"""
|
"""
|
||||||
for c in self.classes:
|
for c in self.classes:
|
||||||
name = get_qualified_name(c)
|
name = get_qualified_name(c)
|
||||||
@@ -192,14 +191,12 @@ class TransRecEF_Test(unittest.TestCase):
|
|||||||
encoded = t[0]
|
encoded = t[0]
|
||||||
decoded = t[1]
|
decoded = t[1]
|
||||||
logging.debug("Testing decode of %s", name)
|
logging.debug("Testing decode of %s", name)
|
||||||
re_dec = inst.decode_record_hex(encoded)
|
re_dec = inst.decode_hex(encoded)
|
||||||
self.assertEqual(decoded, re_dec)
|
self.assertEqual(decoded, re_dec)
|
||||||
# re-encode the decoded data
|
# re-encode the decoded data
|
||||||
logging.debug("Testing re-encode of %s", name)
|
logging.debug("Testing re-encode of %s", name)
|
||||||
re_enc = inst.encode_record_hex(re_dec, len(encoded)//2)
|
re_enc = inst.encode_hex(re_dec, len(encoded)//2)
|
||||||
self.assertEqual(encoded.upper(), re_enc.upper())
|
self.assertEqual(encoded.upper(), re_enc.upper())
|
||||||
# there's no point in testing padded input, as TransRecEF have a fixed record
|
|
||||||
# size and we cannot ever receive more input data than that size.
|
|
||||||
|
|
||||||
|
|
||||||
class TransparentEF_Test(unittest.TestCase):
|
class TransparentEF_Test(unittest.TestCase):
|
||||||
|
|||||||
144
tests/unittests/test_fs_coverage.py
Normal file
144
tests/unittests/test_fs_coverage.py
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# (C) 2026 by sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 2 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
"""Verify that every CardProfile / CardApplication subclass with EF/DF content,
|
||||||
|
and every standalone CardDF subclass (one not reachable as a child of any profile
|
||||||
|
or application), is either listed in docs/pysim_fs_sphinx.py::SECTIONS or
|
||||||
|
explicitly EXCLUDED."""
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
import importlib
|
||||||
|
import inspect
|
||||||
|
import pkgutil
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Make docs/pysim_fs_sphinx.py importable without a full Sphinx build.
|
||||||
|
_DOCS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', 'docs')
|
||||||
|
sys.path.insert(0, os.path.abspath(_DOCS_DIR))
|
||||||
|
|
||||||
|
import pySim # noqa: E402
|
||||||
|
from pySim.filesystem import CardApplication, CardDF, CardMF, CardADF # noqa: E402
|
||||||
|
from pySim.profile import CardProfile # noqa: E402
|
||||||
|
from pysim_fs_sphinx import EXCLUDED, SECTIONS # noqa: E402
|
||||||
|
|
||||||
|
|
||||||
|
class TestFsCoverage(unittest.TestCase):
|
||||||
|
"""Ensure SECTIONS + EXCLUDED together account for all classes with content."""
|
||||||
|
|
||||||
|
# Base CardDF types that are not concrete filesystem objects on their own.
|
||||||
|
_DF_BASE_TYPES = frozenset([CardDF, CardMF, CardADF])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _collect_reachable_df_types(obj) -> set:
|
||||||
|
"""Return the set of all CardDF *types* reachable as children of *obj*."""
|
||||||
|
result = set()
|
||||||
|
if isinstance(obj, CardProfile):
|
||||||
|
children = obj.files_in_mf
|
||||||
|
elif isinstance(obj, CardApplication):
|
||||||
|
result.add(type(obj.adf))
|
||||||
|
children = list(obj.adf.children.values())
|
||||||
|
elif isinstance(obj, CardDF):
|
||||||
|
children = list(obj.children.values())
|
||||||
|
else:
|
||||||
|
return result
|
||||||
|
queue = list(children)
|
||||||
|
while queue:
|
||||||
|
child = queue.pop()
|
||||||
|
if isinstance(child, CardDF):
|
||||||
|
result.add(type(child))
|
||||||
|
queue.extend(child.children.values())
|
||||||
|
return result
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _has_content(obj) -> bool:
|
||||||
|
"""Return True if *obj* owns any EFs/DFs."""
|
||||||
|
if isinstance(obj, CardProfile):
|
||||||
|
return bool(obj.files_in_mf)
|
||||||
|
if isinstance(obj, CardApplication):
|
||||||
|
return bool(obj.adf.children)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def test_all_profiles_and_apps_covered(self):
|
||||||
|
# build a set of (module, class-name) pairs that are already accounted for
|
||||||
|
covered = {(mod, cls) for (_, mod, cls) in SECTIONS}
|
||||||
|
accounted_for = covered | EXCLUDED
|
||||||
|
|
||||||
|
uncovered = []
|
||||||
|
reachable_df_types = set()
|
||||||
|
loaded_modules = {}
|
||||||
|
|
||||||
|
for modinfo in pkgutil.walk_packages(pySim.__path__, prefix='pySim.'):
|
||||||
|
modname = modinfo.name
|
||||||
|
try:
|
||||||
|
module = importlib.import_module(modname)
|
||||||
|
except Exception: # skip inport errors, if any
|
||||||
|
continue
|
||||||
|
loaded_modules[modname] = module
|
||||||
|
|
||||||
|
for name, cls in inspect.getmembers(module, inspect.isclass):
|
||||||
|
# skip classes that are merely imported by this module
|
||||||
|
if cls.__module__ != modname:
|
||||||
|
continue
|
||||||
|
# examine only subclasses of CardProfile and CardApplication
|
||||||
|
if not issubclass(cls, (CardProfile, CardApplication)):
|
||||||
|
continue
|
||||||
|
# skip the abstract base classes themselves
|
||||||
|
if cls in (CardProfile, CardApplication):
|
||||||
|
continue
|
||||||
|
# classes that require constructor arguments cannot be probed
|
||||||
|
try:
|
||||||
|
obj = cls()
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# collect all CardDF types reachable from this profile/application
|
||||||
|
# (used below to identify standalone DFs)
|
||||||
|
reachable_df_types |= self._collect_reachable_df_types(obj)
|
||||||
|
|
||||||
|
if self._has_content(obj) and (modname, name) not in accounted_for:
|
||||||
|
uncovered.append((modname, name))
|
||||||
|
|
||||||
|
# check standalone CardDFs (such as DF.EIRENE or DF.SYSTEM)
|
||||||
|
for modname, module in loaded_modules.items():
|
||||||
|
for name, cls in inspect.getmembers(module, inspect.isclass):
|
||||||
|
if cls.__module__ != modname:
|
||||||
|
continue
|
||||||
|
if not issubclass(cls, CardDF):
|
||||||
|
continue
|
||||||
|
if cls in self._DF_BASE_TYPES:
|
||||||
|
continue
|
||||||
|
if cls in reachable_df_types:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
obj = cls()
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
if obj.children and (modname, name) not in accounted_for:
|
||||||
|
uncovered.append((modname, name))
|
||||||
|
|
||||||
|
if uncovered:
|
||||||
|
lines = [
|
||||||
|
'The following classes have EFs/DFs, but not listed in SECTIONS or EXCLUDED:',
|
||||||
|
*(f' {modname}.{name}' for modname, name in sorted(uncovered)),
|
||||||
|
'Please modify docs/pysim_fs_sphinx.py accordingly',
|
||||||
|
]
|
||||||
|
self.fail('\n'.join(lines))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
@@ -295,7 +295,7 @@ class Install_param_Test(unittest.TestCase):
|
|||||||
load_parameters = gen_install_parameters(256, 256, '010001001505000000000000000000000000')
|
load_parameters = gen_install_parameters(256, 256, '010001001505000000000000000000000000')
|
||||||
self.assertEqual(load_parameters, 'c900ef1cc8020100c7020100ca12010001001505000000000000000000000000')
|
self.assertEqual(load_parameters, 'c900ef1cc8020100c7020100ca12010001001505000000000000000000000000')
|
||||||
|
|
||||||
load_parameters = gen_install_parameters(None, None, '')
|
load_parameters = gen_install_parameters()
|
||||||
self.assertEqual(load_parameters, 'c900')
|
self.assertEqual(load_parameters, 'c900')
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
Reference in New Issue
Block a user