mirror of
https://gitea.osmocom.org/sim-card/pysim.git
synced 2026-03-17 19:08:35 +03:00
Compare commits
10 Commits
daniel/ota
...
pmaier/pgs
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a3469bc03b | ||
|
|
c118012fb9 | ||
|
|
45bffb53f9 | ||
|
|
cc15b2b4c3 | ||
|
|
11dfad88e6 | ||
|
|
572a81f2af | ||
|
|
ff4f2491b8 | ||
|
|
05fd870d1b | ||
|
|
c07ecbae52 | ||
|
|
e20f9e6cdf |
@@ -100,6 +100,7 @@ Please install the following dependencies:
|
|||||||
- pyyaml >= 5.1
|
- pyyaml >= 5.1
|
||||||
- smpp.pdu (from `github.com/hologram-io/smpp.pdu`)
|
- smpp.pdu (from `github.com/hologram-io/smpp.pdu`)
|
||||||
- termcolor
|
- termcolor
|
||||||
|
- psycopg2-binary
|
||||||
|
|
||||||
Example for Debian:
|
Example for Debian:
|
||||||
```sh
|
```sh
|
||||||
|
|||||||
286
contrib/csv-to-pgsql.py
Executable file
286
contrib/csv-to-pgsql.py
Executable file
@@ -0,0 +1,286 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import logging
|
||||||
|
import csv
|
||||||
|
import sys
|
||||||
|
import yaml
|
||||||
|
import psycopg2
|
||||||
|
from psycopg2.sql import Identifier, SQL
|
||||||
|
from pathlib import Path
|
||||||
|
from pySim.log import PySimLogger
|
||||||
|
from packaging import version
|
||||||
|
|
||||||
|
log = PySimLogger.get("CSV2PGQSL")
|
||||||
|
|
||||||
|
class CardKeyDatabase:
    """PostgreSQL-backed storage for card key material.

    Wraps a psycopg2 connection and one database table that holds either
    UICC or eUICC key material. The table type is deduced from the table
    name, which must contain the substring "uicc_keys" or "euicc_keys".
    """

    def __init__(self, config_filename: str, table_name: str, create_table: bool = False, admin: bool = False):
        """
        Initialize database connection and set the table which shall be used as storage for the card key data.
        In case the specified table does not exist yet it can be created using the create_table parameter.

        New tables are always minimal tables which follow a pre-defined table scheme. The user may extend the table
        with additional columns using add_cols() later.

        Args:
                config_filename : path to the yaml config file (host, db_name, db_users, table_names)
                table_name : name of the database table to use; must contain "uicc_keys" or "euicc_keys"
                create_table : create a new minimal table of the deduced type (requires admin = True)
                admin : connect as the 'admin' user instead of the 'importer' user
        """

        def user_from_config_file(config, role: str) -> tuple[str, str]:
            # Look up name/password of the user configured for the given role.
            db_users = config.get('db_users')
            if db_users is None:
                raise ValueError("no 'db_users' section found in config file.")
            user = db_users.get(role)
            if user is None:
                raise ValueError("user for role '%s' not set up in config file." % role)
            return user.get('name'), user.get('pass')

        self.table = table_name
        self.cols = None  # lazily populated column cache, see get_cols()

        # Depending on the table type, the table name must contain either the substring "uicc_keys" or "euicc_keys".
        # This convention will allow us to deduct the table type from the table name.
        if "euicc_keys" not in table_name and "uicc_keys" not in table_name:
            raise ValueError("Table name (%s) should contain the substring \"uicc_keys\" or \"euicc_keys\"" % table_name)

        # Read config file
        log.info("Using config file: %s", config_filename)
        with open(config_filename, "r") as cfg:
            config = yaml.load(cfg, Loader=yaml.FullLoader)
        host = config.get('host')
        log.info("Database host: %s", host)
        db_name = config.get('db_name')
        log.info("Database name: %s", db_name)
        # Tolerate a missing 'table_names' entry; it is only needed for the warning below.
        table_names = config.get('table_names') or []
        username_admin, password_admin = user_from_config_file(config, 'admin')
        username_importer, password_importer = user_from_config_file(config, 'importer')
        username_reader, _ = user_from_config_file(config, 'reader')

        # Switch between admin and importer user
        if admin:
            username, password = username_admin, password_admin
        else:
            username, password = username_importer, password_importer

        # Create database connection
        log.info("Database user: %s", username)
        self.conn = psycopg2.connect(dbname=db_name, user=username, password=password, host=host)
        self.cur = self.conn.cursor()

        # In the context of this tool it is not relevant if the table name is present in the config file. However,
        # pySim-shell.py will require the table name to be configured properly to access the database table.
        if self.table not in table_names:
            log.warning("Specified table name (%s) is not yet present in config file (required for access from pySim-shell.py)",
                        self.table)

        # Create a new minimal database table of the specified table type.
        if create_table:
            if not admin:
                raise ValueError("creation of new table refused, use option --admin and try again.")
            if "euicc_keys" in self.table:
                self.__create_table(username_reader, username_importer, ['EID'])
            elif "uicc_keys" in self.table:
                self.__create_table(username_reader, username_importer, ['ICCID', 'IMSI'])

        # Ensure a table with the specified name exists
        log.info("Database table: %s", self.table)
        if not self.get_cols():
            raise ValueError("Table name (%s) does not exist yet" % self.table)
        log.info("Database table columns: %s", str(self.get_cols()))

    def __create_table(self, user_reader: str, user_importer: str, cols: list[str]):
        """
        Initialize a new table. New tables are always minimal tables with one primary key (first item of cols) and
        additional indexed columns. Non index-columns may be added later using method add_cols().
        """

        # Create table: the first column is the primary key, all further columns are plain VARCHAR columns.
        col_defs = [SQL("{} VARCHAR PRIMARY KEY").format(Identifier(cols[0].lower()))]
        col_defs += [SQL("{} VARCHAR").format(Identifier(c.lower())) for c in cols[1:]]
        self.cur.execute(SQL("CREATE TABLE {} ({});").format(Identifier(self.table.lower()),
                                                             SQL(", ").join(col_defs)))

        # Create indexes for all other columns
        for c in cols[1:]:
            self.cur.execute(SQL("CREATE INDEX {} ON {}({});").format(Identifier(c.lower()),
                                                                      Identifier(self.table.lower()),
                                                                      Identifier(c.lower())))

        # Set permissions: the importer may only insert, the reader may only read.
        self.cur.execute(SQL("GRANT INSERT ON {} TO {};").format(Identifier(self.table.lower()),
                                                                 Identifier(user_importer)))
        self.cur.execute(SQL("GRANT SELECT ON {} TO {};").format(Identifier(self.table.lower()),
                                                                 Identifier(user_reader)))

        log.info("New database table created: %s", str(self.table.lower()))

    def get_cols(self) -> list[str]:
        """
        Get a list of all columns available in the current table scheme.

        Returns:
                list with column names (in uppercase) of the database table
        """

        # Return cached col list if present
        if self.cols:
            return self.cols

        # Request a list of current cols from the database
        self.cur.execute("SELECT column_name FROM information_schema.columns where table_name = %s;", (self.table.lower(),))
        cols = [c[0].upper() for c in self.cur.fetchall()]
        self.cols = cols
        return cols

    def get_missing_cols(self, cols_expected: list[str]) -> list[str]:
        """
        Check if the current table scheme lacks any of the given expected columns.

        Returns:
                list with the missing columns.
        """

        cols_present = self.get_cols()
        return list(set(cols_expected) - set(cols_present))

    def add_cols(self, cols: list[str]):
        """
        Update the current table scheme with additional columns. In case the columns already exist, the
        table schema is not changed.

        Args:
                cols : list with column names to add
        """

        cols_missing = self.get_missing_cols(cols)

        # Depending on the table type (see constructor), we either have a primary key 'ICCID' (for UICC data), or 'EID'
        # (for eUICC data). Both table formats hold different types of data and have rather different columns as well.
        # Let's prevent accidentally mixing both types.
        if 'ICCID' in cols_missing:
            raise ValueError("Table %s stores eUICC key material, refusing to add UICC specific column 'ICCID'" % self.table)
        if 'EID' in cols_missing:
            raise ValueError("Table %s stores UICC key material, refusing to add eUICC specific column 'EID'" % self.table)

        # Add the missing columns to the table; invalidate the column cache first.
        self.cols = None
        for c in cols_missing:
            self.cur.execute(SQL("ALTER TABLE {} ADD {} VARCHAR;").format(Identifier(self.table.lower()),
                                                                          Identifier(c.lower())))

    def insert_row(self, row: dict[str, str]):
        """
        Insert a new row into the database table.

        Args:
                row : dictionary with the column names and their designated values
        """

        if not row:
            raise ValueError("refusing to insert an empty row into table %s" % self.table)

        # Check if the row is compatible with the current table scheme
        cols_missing = self.get_missing_cols(list(row.keys()))
        if cols_missing:
            raise ValueError("table %s has incompatible format, the row %s contains unknown cols %s" %
                             (self.table, str(row), str(cols_missing)))

        # Insert row into database table (identifiers composed safely, values passed as parameters)
        row_keys = list(row.keys())
        row_values = list(row.values())
        query = SQL("INSERT INTO {} ({}) VALUES ({});").format(
            Identifier(self.table.lower()),
            SQL(", ").join(Identifier(k.lower()) for k in row_keys),
            SQL(", ").join([SQL("%s")] * len(row_values)))
        self.cur.execute(query, row_values)

    def commit(self):
        """Commit all pending changes to the database."""
        self.conn.commit()
        log.info("Changes to table %s committed!", self.table)
|
||||||
|
|
||||||
|
def open_csv(opts: argparse.Namespace):
    """Open the input CSV file and return a csv.DictReader with uppercased column names.

    The file handle is intentionally left open: the returned DictReader streams
    from it while the import runs.

    Args:
            opts : parsed commandline options (opts.csv is the CSV file path)

    Returns:
            csv.DictReader positioned after the header row

    Raises:
            RuntimeError: when no header row could be read (e.g. empty file)
    """
    log.info("CSV file: %s", opts.csv)
    csv_file = open(opts.csv, 'r')
    cr = csv.DictReader(csv_file)
    # NOTE: a DictReader object is always truthy, so `if not cr:` would never
    # trigger. The meaningful check is whether a header row exists at all.
    if cr.fieldnames is None:
        raise RuntimeError("could not open DictReader for CSV-File '%s'" % opts.csv)
    # Column matching against the database table scheme is done in uppercase.
    cr.fieldnames = [field.upper() for field in cr.fieldnames]
    log.info("CSV file columns: %s", str(cr.fieldnames))
    return cr
|
||||||
|
|
||||||
|
def open_db(cr: csv.DictReader, opts: argparse.Namespace) -> CardKeyDatabase:
    """Connect to the database and align the table schema with the CSV columns.

    Exits the process with status 2 when the database initialization fails or
    when the table still lacks columns that the CSV file provides.

    Args:
            cr : DictReader of the input CSV file (fieldnames already uppercased)
            opts : parsed commandline options

    Returns:
            ready-to-use CardKeyDatabase instance
    """
    try:
        db = CardKeyDatabase(opts.pqsql, opts.table_name, opts.create_table, opts.admin)

        # Compare the CSV header against the table schema. Columns may only be
        # added when explicitly requested or when the table was just created.
        missing = db.get_missing_cols(cr.fieldnames)
        may_extend = opts.update_columns or opts.create_table
        if missing != [] and may_extend:
            log.info("Adding missing columns: %s", str(missing))
            db.add_cols(missing)
            missing = db.get_missing_cols(cr.fieldnames)

        # The schema must now cover every CSV column, otherwise abort.
        if missing != []:
            log.error("Database table lacks CSV file columns: %s -- import aborted!", missing)
            sys.exit(2)
    except Exception as e:
        log.error(str(e).strip())
        log.error("Database initialization aborted due to error!")
        sys.exit(2)

    return db
|
||||||
|
|
||||||
|
def import_from_csv(db: CardKeyDatabase, cr: csv.DictReader):
    """Feed every row of the CSV file into the database table.

    Exits the process with status 2 on the first failing row; since commit()
    has not been called yet, nothing is persisted in that case.

    Args:
            db : database object to import into
            cr : DictReader of the input CSV file
    """
    n_rows = 0
    for row in cr:
        try:
            db.insert_row(row)
        except Exception as e:
            log.error(str(e).strip())
            log.error("CSV file import aborted due to error, no datasets committed!")
            sys.exit(2)
        n_rows += 1
        # Give some feedback on long-running imports.
        if n_rows % 100 == 0:
            log.info("CSV file import in progress, %d rows imported...", n_rows)
    log.info("CSV file import done, %d rows imported", n_rows)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Commandline interface of the importer tool.
    parser = argparse.ArgumentParser(description='CSV importer for pySim-shell\'s PostgreSQL Card Key Provider',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("--verbose", help="Enable verbose logging", action='store_true', default=False)
    parser.add_argument('--pqsql', metavar='FILE',
                        default=str(Path.home()) + "/.osmocom/pysim/card_data_pqsql.cfg",
                        help='Read card data from PostgreSQL database (config file)')
    parser.add_argument('--csv', metavar='FILE', help='input CSV file with card data', required=True)
    parser.add_argument("--table-name", help="name of the card key table", type=str, required=True)
    parser.add_argument("--update-columns", help="add missing table columns", action='store_true', default=False)
    parser.add_argument("--create-table", action='store_true', help="create new card key table", default=False)
    parser.add_argument("--admin", action='store_true', help="perform action as admin", default=False)
    opts = parser.parse_args()

    # Set up logging; warnings are highlighted in yellow.
    PySimLogger.setup(print, {logging.WARN: "\033[33m"})
    if opts.verbose:
        PySimLogger.set_verbose(True)
        PySimLogger.set_level(logging.DEBUG)

    # Open CSV file
    cr = open_csv(opts)

    # Open database, create initial table, update column scheme
    db = open_db(cr, opts)

    # Progress with import (skipped in admin mode, where only schema changes are made)
    if not opts.admin:
        import_from_csv(db, cr)

    # Commit changes to the database
    db.commit()
|
||||||
@@ -329,7 +329,7 @@ def do_info(pes: ProfileElementSequence, opts):
|
|||||||
print("Security domain Instance AID: %s" % b2h(sd.decoded['instance']['instanceAID']))
|
print("Security domain Instance AID: %s" % b2h(sd.decoded['instance']['instanceAID']))
|
||||||
# FIXME: 'applicationSpecificParametersC9' parsing to figure out enabled SCP
|
# FIXME: 'applicationSpecificParametersC9' parsing to figure out enabled SCP
|
||||||
for key in sd.keys:
|
for key in sd.keys:
|
||||||
print("\tKVN=0x%02x, KID=0x%02x, %s" % (key.key_version_number, key.key_identifier, key.key_components))
|
print("\t%s" % repr(key))
|
||||||
|
|
||||||
# RFM
|
# RFM
|
||||||
print()
|
print()
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
Retrieving card-individual keys via CardKeyProvider
|
Retrieving card-individual keys via CardKeyProvider
|
||||||
===================================================
|
===================================================
|
||||||
|
|
||||||
When working with a batch of cards, or more than one card in general, it
|
When working with a batch of cards, or more than one card in general, it
|
||||||
@@ -20,9 +20,11 @@ example develop your own CardKeyProvider that queries some kind of
|
|||||||
database for the key material, or that uses a key derivation function to
|
database for the key material, or that uses a key derivation function to
|
||||||
derive card-specific key material from a global master key.
|
derive card-specific key material from a global master key.
|
||||||
|
|
||||||
The only actual CardKeyProvider implementation included in pySim is the
|
pySim already includes two CardKeyProvider implementations. One to retrieve
|
||||||
`CardKeyProviderCsv` which retrieves the key material from a
|
key material from a CSV file (`CardKeyProviderCsv`) and a second one that allows
|
||||||
[potentially encrypted] CSV file.
|
to retrieve the key material from a PostgreSQL database (`CardKeyProviderPgsql`).
|
||||||
|
Both implementations equally implement a column encryption scheme that allows
|
||||||
|
to protect sensitive columns using a *transport key*.
|
||||||
|
|
||||||
|
|
||||||
The CardKeyProviderCsv
|
The CardKeyProviderCsv
|
||||||
@@ -40,11 +42,215 @@ of pySim-shell. If you do not specify a CSV file, pySim will attempt to
|
|||||||
open a CSV file from the default location at
|
open a CSV file from the default location at
|
||||||
`~/.osmocom/pysim/card_data.csv`, and use that, if it exists.
|
`~/.osmocom/pysim/card_data.csv`, and use that, if it exists.
|
||||||
|
|
||||||
|
The `CardKeyProviderCsv` is suitable to manage small amounts of key material
|
||||||
|
locally. However, if your card inventory is very large and the key material
|
||||||
|
must be made available on multiple sites, the `CardKeyProviderPgsql` is the
|
||||||
|
better option.
|
||||||
|
|
||||||
|
|
||||||
|
The CardKeyProviderPgsql
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
With the `CardKeyProviderPgsql` you can use a PostgreSQL database as storage
|
||||||
|
medium. The implementation comes with a CSV importer tool that consumes the
|
||||||
|
same CSV files you would normally use with the `CardKeyProviderCsv`, so you
|
||||||
|
can just use your existing CSV files and import them into the database.
|
||||||
|
|
||||||
|
|
||||||
|
Setting up the database
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
From the perspective of the database, the `CardKeyProviderPgsql` has only
|
||||||
|
minimal requirements. You do not have to create any tables in advance. An empty
|
||||||
|
database and at least one user that may create, alter and insert into tables is
|
||||||
|
sufficient. However, for increased reliability and as a protection against
|
||||||
|
incorrect operation, the `CardKeyProviderPgsql` supports a hierarchical model
|
||||||
|
with three users (or roles):
|
||||||
|
|
||||||
|
* **admin**:
|
||||||
|
This should be the owner of the database. It is intended to be used for
|
||||||
|
administrative tasks like adding new tables or adding new columns to existing
|
||||||
|
tables. This user should not be used to insert new data into tables or to access
|
||||||
|
data from within pySim-shell using the `CardKeyProviderPsql`
|
||||||
|
|
||||||
|
* **importer**:
|
||||||
|
This user is used when feeding new data into an existing table. It should only
|
||||||
|
be able to insert new rows into existing tables. It should not be used for
|
||||||
|
administrative tasks or to access data from within pySim-shell using the
|
||||||
|
`CardKeyProviderPsql`
|
||||||
|
|
||||||
|
* **reader**:
|
||||||
|
To access data from within pySim shell using the `CardKeyProviderPsql` the
|
||||||
|
reader user is the correct one to use. This user should have no write access
|
||||||
|
to the database or any of the tables.
|
||||||
|
|
||||||
|
|
||||||
|
Creating a config file
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
The default location for the config file is `~/.osmocom/pysim/card_data_pqsql.cfg`
|
||||||
|
The file uses `yaml` syntax and should look like the example below:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
host: "127.0.0.1"
|
||||||
|
db_name: "my_database"
|
||||||
|
table_names:
|
||||||
|
- "uicc_keys"
|
||||||
|
- "euicc_keys"
|
||||||
|
db_users:
|
||||||
|
admin:
|
||||||
|
name: "my_admin_user"
|
||||||
|
pass: "my_admin_password"
|
||||||
|
importer:
|
||||||
|
name: "my_importer_user"
|
||||||
|
pass: "my_importer_password"
|
||||||
|
reader:
|
||||||
|
name: "my_reader_user"
|
||||||
|
pass: "my_reader_password"
|
||||||
|
|
||||||
|
This file is used by pySim-shell and by the importer tool. Both expect the file
|
||||||
|
in the aforementioned location. In case you want to store the file in a
|
||||||
|
different location you may use the `--pqsql` commandline option to provide a
|
||||||
|
custom config file path.
|
||||||
|
|
||||||
|
The hostname and the database name for the PostgreSQL database is set with the
|
||||||
|
`host` and `db_name` fields. The field `db_users` sets the user names and
|
||||||
|
passwords for each of the aforementioned users (or roles). In case only a single
|
||||||
|
admin user is used, all three entries may be populated with the same user name
|
||||||
|
and password (not recommended).
|
||||||
|
|
||||||
|
The field `table_names` sets the tables that the `CardKeyProviderPgsql` shall
|
||||||
|
use to query to locate card key data. You can set up as many tables as you
|
||||||
|
want, `CardKeyProviderPsql` will query them in order, one by one until a
|
||||||
|
matching entry is found.
|
||||||
|
|
||||||
|
NOTE: In case you do not want to disclose the admin and the importer credentials
|
||||||
|
to pySim-shell you may remove those lines. pySim-shell will only require the
|
||||||
|
`reader` entry under `db_users`.
|
||||||
|
|
||||||
|
|
||||||
|
Using the Importer
|
||||||
|
^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Before data can be imported, you must first create a database table. Tables
|
||||||
|
are created with the provided importer tool, which can be found under
|
||||||
|
`contrib/csv-to-pgsql.py`. This tool is used to create the database table and
|
||||||
|
read the data from the provided CSV file into the database.
|
||||||
|
|
||||||
|
As mentioned before, all CSV file formats that work with `CardKeyProviderCsv`
|
||||||
|
may be used. To demonstrate how the import process works, let's assume you want
|
||||||
|
to import a CSV file format that looks like the following example. Let's also
|
||||||
|
assume that you didn't get the Global Platform keys from your card vendor for
|
||||||
|
this batch of UICC cards, so your CSV file lacks the columns for those fields.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
"id","imsi","iccid","acc","pin1","puk1","pin2","puk2","ki","opc","adm1"
|
||||||
|
"card1","999700000000001","8900000000000000001","0001","1111","11111111","0101","01010101","11111111111111111111111111111111","11111111111111111111111111111111","11111111"
|
||||||
|
"card2","999700000000002","8900000000000000002","0002","2222","22222222","0202","02020202","22222222222222222222222222222222","22222222222222222222222222222222","22222222"
|
||||||
|
"card3","999700000000003","8900000000000000003","0003","3333","22222222","0303","03030303","33333333333333333333333333333333","33333333333333333333333333333333","33333333"
|
||||||
|
|
||||||
|
Since this is your first import, the database still lacks the table. To
|
||||||
|
instruct the importer to create a new table, you may use the `--create-table`
|
||||||
|
option. You also have to pick an appropriate name for the table. Any name may
|
||||||
|
be chosen as long as it contains the string `uicc_keys` or `euicc_keys`,
|
||||||
|
depending on the type of data (`UICC` or `eUICC`) you intend to store in the
|
||||||
|
table. The creation of the table is an administrative task and can only be done
|
||||||
|
with the `admin` user. The `admin` user is selected using the `--admin` switch.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
$ PYTHONPATH=../ ./csv-to-pgsql.py --csv ./csv-to-pgsql_example_01.csv --table-name uicc_keys --create-table --admin
|
||||||
|
INFO: CSV file: ./csv-to-pgsql_example_01.csv
|
||||||
|
INFO: CSV file columns: ['ID', 'IMSI', 'ICCID', 'ACC', 'PIN1', 'PUK1', 'PIN2', 'PUK2', 'KI', 'OPC', 'ADM1']
|
||||||
|
INFO: Using config file: /home/user/.osmocom/pysim/card_data_pqsql.cfg
|
||||||
|
INFO: Database host: 127.0.0.1
|
||||||
|
INFO: Database name: my_database
|
||||||
|
INFO: Database user: my_admin_user
|
||||||
|
INFO: New database table created: uicc_keys
|
||||||
|
INFO: Database table: uicc_keys
|
||||||
|
INFO: Database table columns: ['ICCID', 'IMSI']
|
||||||
|
INFO: Adding missing columns: ['PIN2', 'PUK1', 'PUK2', 'ACC', 'ID', 'PIN1', 'ADM1', 'KI', 'OPC']
|
||||||
|
INFO: Changes to table uicc_keys committed!
|
||||||
|
|
||||||
|
The importer has created a new table with the name `uicc_keys`. The table is
|
||||||
|
now ready to be filled with data.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
$ PYTHONPATH=../ ./csv-to-pgsql.py --csv ./csv-to-pgsql_example_01.csv --table-name uicc_keys
|
||||||
|
INFO: CSV file: ./csv-to-pgsql_example_01.csv
|
||||||
|
INFO: CSV file columns: ['ID', 'IMSI', 'ICCID', 'ACC', 'PIN1', 'PUK1', 'PIN2', 'PUK2', 'KI', 'OPC', 'ADM1']
|
||||||
|
INFO: Using config file: /home/user/.osmocom/pysim/card_data_pqsql.cfg
|
||||||
|
INFO: Database host: 127.0.0.1
|
||||||
|
INFO: Database name: my_database
|
||||||
|
INFO: Database user: my_importer_user
|
||||||
|
INFO: Database table: uicc_keys
|
||||||
|
INFO: Database table columns: ['ICCID', 'IMSI', 'PIN2', 'PUK1', 'PUK2', 'ACC', 'ID', 'PIN1', 'ADM1', 'KI', 'OPC']
|
||||||
|
INFO: CSV file import done, 3 rows imported
|
||||||
|
INFO: Changes to table uicc_keys committed!
|
||||||
|
|
||||||
|
A quick `SELECT * FROM uicc_keys;` at the PostgreSQL console should now display
|
||||||
|
the contents of the CSV file you have fed into the importer.
|
||||||
|
|
||||||
|
Let's now assume that with your next batch of UICC cards your vendor includes
|
||||||
|
the Global Platform keys so your CSV format changes. It may now look like this:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
"id","imsi","iccid","acc","pin1","puk1","pin2","puk2","ki","opc","adm1","scp02_dek_1","scp02_enc_1","scp02_mac_1"
|
||||||
|
"card4","999700000000004","8900000000000000004","0004","4444","44444444","0404","04040404","44444444444444444444444444444444","44444444444444444444444444444444","44444444","44444444444444444444444444444444","44444444444444444444444444444444","44444444444444444444444444444444"
|
||||||
|
"card5","999700000000005","8900000000000000005","0005","4444","55555555","0505","05050505","55555555555555555555555555555555","55555555555555555555555555555555","55555555","55555555555555555555555555555555","55555555555555555555555555555555","55555555555555555555555555555555"
|
||||||
|
"card6","999700000000006","8900000000000000006","0006","4444","66666666","0606","06060606","66666666666666666666666666666666","66666666666666666666666666666666","66666666","66666666666666666666666666666666","66666666666666666666666666666666","66666666666666666666666666666666"
|
||||||
|
|
||||||
|
When importing data from an updated CSV format the database table also has
|
||||||
|
to be updated. This is done using the `--update-columns` switch. Like when
|
||||||
|
creating new tables, this operation also requires admin privileges, so the
|
||||||
|
`--admin` switch is required again.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
$ PYTHONPATH=../ ./csv-to-pgsql.py --csv ./csv-to-pgsql_example_02.csv --table-name uicc_keys --update-columns --admin
|
||||||
|
INFO: CSV file: ./csv-to-pgsql_example_02.csv
|
||||||
|
INFO: CSV file columns: ['ID', 'IMSI', 'ICCID', 'ACC', 'PIN1', 'PUK1', 'PIN2', 'PUK2', 'KI', 'OPC', 'ADM1', 'SCP02_DEK_1', 'SCP02_ENC_1', 'SCP02_MAC_1']
|
||||||
|
INFO: Using config file: /home/user/.osmocom/pysim/card_data_pqsql.cfg
|
||||||
|
INFO: Database host: 127.0.0.1
|
||||||
|
INFO: Database name: my_database
|
||||||
|
INFO: Database user: my_admin_user
|
||||||
|
INFO: Database table: uicc_keys
|
||||||
|
INFO: Database table columns: ['ICCID', 'IMSI', 'PIN2', 'PUK1', 'PUK2', 'ACC', 'ID', 'PIN1', 'ADM1', 'KI', 'OPC']
|
||||||
|
INFO: Adding missing columns: ['SCP02_ENC_1', 'SCP02_MAC_1', 'SCP02_DEK_1']
|
||||||
|
INFO: Changes to table uicc_keys committed!
|
||||||
|
|
||||||
|
When the new table columns are added, the import may be continued like the
|
||||||
|
first one:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
$ PYTHONPATH=../ ./csv-to-pgsql.py --csv ./csv-to-pgsql_example_02.csv --table-name uicc_keys
|
||||||
|
INFO: CSV file: ./csv-to-pgsql_example_02.csv
|
||||||
|
INFO: CSV file columns: ['ID', 'IMSI', 'ICCID', 'ACC', 'PIN1', 'PUK1', 'PIN2', 'PUK2', 'KI', 'OPC', 'ADM1', 'SCP02_DEK_1', 'SCP02_ENC_1', 'SCP02_MAC_1']
|
||||||
|
INFO: Using config file: /home/user/.osmocom/pysim/card_data_pqsql.cfg
|
||||||
|
INFO: Database host: 127.0.0.1
|
||||||
|
INFO: Database name: my_database
|
||||||
|
INFO: Database user: my_importer_user
|
||||||
|
INFO: Database table: uicc_keys
|
||||||
|
INFO: Database table columns: ['ICCID', 'IMSI', 'PIN2', 'PUK1', 'PUK2', 'ACC', 'ID', 'PIN1', 'ADM1', 'KI', 'OPC', 'SCP02_ENC_1', 'SCP02_MAC_1', 'SCP02_DEK_1']
|
||||||
|
INFO: CSV file import done, 3 rows imported
|
||||||
|
INFO: Changes to table uicc_keys committed!
|
||||||
|
|
||||||
|
On the PostgreSQL console a `SELECT * FROM uicc_keys;` should now show the
|
||||||
|
imported data with the added columns. All important data should now also be
|
||||||
|
available from within pySim-shell via the `CardKeyProviderPgsql`.
|
||||||
|
|
||||||
|
|
||||||
Column-Level CSV encryption
|
Column-Level CSV encryption
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
---------------------------
|
||||||
|
|
||||||
pySim supports column-level CSV encryption. This feature will make sure
|
pySim supports column-level CSV encryption. This feature will make sure
|
||||||
that your key material is not stored in plaintext in the CSV file.
|
that your key material is not stored in plaintext in the CSV file (or
|
||||||
|
database).
|
||||||
|
|
||||||
The encryption mechanism uses AES in CBC mode. You can use any key
|
The encryption mechanism uses AES in CBC mode. You can use any key
|
||||||
length permitted by AES (128/192/256 bit).
|
length permitted by AES (128/192/256 bit).
|
||||||
@@ -72,6 +278,8 @@ by all columns of the set:
|
|||||||
* `SCP03_ISDA` is a group alias for `SCP03_ENC_ISDA`, `SCP03_MAC_ISDA`, `SCP03_DEK_ISDA`
|
* `SCP03_ISDA` is a group alias for `SCP03_ENC_ISDA`, `SCP03_MAC_ISDA`, `SCP03_DEK_ISDA`
|
||||||
* `SCP03_ISDR` is a group alias for `SCP03_ENC_ISDR`, `SCP03_MAC_ISDR`, `SCP03_DEK_ISDR`
|
* `SCP03_ISDR` is a group alias for `SCP03_ENC_ISDR`, `SCP03_MAC_ISDR`, `SCP03_DEK_ISDR`
|
||||||
|
|
||||||
|
NOTE: When using `CardKeyProviderPgsql`, the input CSV files must be encrypted
|
||||||
|
before import.
|
||||||
|
|
||||||
Field naming
|
Field naming
|
||||||
------------
|
------------
|
||||||
@@ -82,9 +290,9 @@ Field naming
|
|||||||
* For look-up of eUICC specific key material (like SCP03 keys for the
|
* For look-up of eUICC specific key material (like SCP03 keys for the
|
||||||
ISD-R, ECASD), pySim uses the `EID` field as lookup key.
|
ISD-R, ECASD), pySim uses the `EID` field as lookup key.
|
||||||
|
|
||||||
As soon as the CardKeyProviderCsv finds a line (row) in your CSV where
|
As soon as the CardKeyProvider finds a line (row) in your CSV file
|
||||||
the ICCID or EID match, it looks for the column containing the requested
|
(or database) where the ICCID or EID match, it looks for the column containing
|
||||||
data.
|
the requested data.
|
||||||
|
|
||||||
|
|
||||||
ADM PIN
|
ADM PIN
|
||||||
|
|||||||
@@ -69,7 +69,7 @@ from pySim.ts_102_222 import Ts102222Commands
|
|||||||
from pySim.gsm_r import DF_EIRENE
|
from pySim.gsm_r import DF_EIRENE
|
||||||
from pySim.cat import ProactiveCommand
|
from pySim.cat import ProactiveCommand
|
||||||
|
|
||||||
from pySim.card_key_provider import CardKeyProviderCsv
|
from pySim.card_key_provider import CardKeyProviderCsv, CardKeyProviderPgsql
|
||||||
from pySim.card_key_provider import card_key_provider_register, card_key_provider_get_field, card_key_provider_get
|
from pySim.card_key_provider import card_key_provider_register, card_key_provider_get_field, card_key_provider_get
|
||||||
|
|
||||||
from pySim.app import init_card
|
from pySim.app import init_card
|
||||||
@@ -1140,6 +1140,9 @@ card_key_group = option_parser.add_argument_group('Card Key Provider Options')
|
|||||||
card_key_group.add_argument('--csv', metavar='FILE',
|
card_key_group.add_argument('--csv', metavar='FILE',
|
||||||
default=str(Path.home()) + "/.osmocom/pysim/card_data.csv",
|
default=str(Path.home()) + "/.osmocom/pysim/card_data.csv",
|
||||||
help='Read card data from CSV file')
|
help='Read card data from CSV file')
|
||||||
|
card_key_group.add_argument('--pqsql', metavar='FILE',
|
||||||
|
default=str(Path.home()) + "/.osmocom/pysim/card_data_pqsql.cfg",
|
||||||
|
help='Read card data from PostgreSQL database (config file)')
|
||||||
card_key_group.add_argument('--csv-column-key', metavar='FIELD:AES_KEY_HEX', default=[], action='append',
|
card_key_group.add_argument('--csv-column-key', metavar='FIELD:AES_KEY_HEX', default=[], action='append',
|
||||||
help=argparse.SUPPRESS, dest='column_key')
|
help=argparse.SUPPRESS, dest='column_key')
|
||||||
card_key_group.add_argument('--column-key', metavar='FIELD:AES_KEY_HEX', default=[], action='append',
|
card_key_group.add_argument('--column-key', metavar='FIELD:AES_KEY_HEX', default=[], action='append',
|
||||||
@@ -1179,6 +1182,8 @@ if __name__ == '__main__':
|
|||||||
column_keys[name] = key
|
column_keys[name] = key
|
||||||
if os.path.isfile(opts.csv):
|
if os.path.isfile(opts.csv):
|
||||||
card_key_provider_register(CardKeyProviderCsv(opts.csv, column_keys))
|
card_key_provider_register(CardKeyProviderCsv(opts.csv, column_keys))
|
||||||
|
if os.path.isfile(opts.pqsql):
|
||||||
|
card_key_provider_register(CardKeyProviderPgsql(opts.pqsql, column_keys))
|
||||||
|
|
||||||
# Init card reader driver
|
# Init card reader driver
|
||||||
sl = init_reader(opts, proactive_handler = Proact())
|
sl = init_reader(opts, proactive_handler = Proact())
|
||||||
|
|||||||
@@ -36,6 +36,9 @@ from pySim.log import PySimLogger
|
|||||||
import abc
|
import abc
|
||||||
import csv
|
import csv
|
||||||
import logging
|
import logging
|
||||||
|
import yaml
|
||||||
|
import psycopg2
|
||||||
|
from psycopg2.sql import Identifier, SQL
|
||||||
|
|
||||||
log = PySimLogger.get("CARDKEY")
|
log = PySimLogger.get("CARDKEY")
|
||||||
|
|
||||||
@@ -159,6 +162,7 @@ class CardKeyProviderCsv(CardKeyProvider):
|
|||||||
csv_filename : file name (path) of CSV file containing card-individual key/data
|
csv_filename : file name (path) of CSV file containing card-individual key/data
|
||||||
transport_keys : (see class CardKeyFieldCryptor)
|
transport_keys : (see class CardKeyFieldCryptor)
|
||||||
"""
|
"""
|
||||||
|
log.info("Using CSV file as card key data source: %s" % csv_filename)
|
||||||
self.csv_file = open(csv_filename, 'r')
|
self.csv_file = open(csv_filename, 'r')
|
||||||
if not self.csv_file:
|
if not self.csv_file:
|
||||||
raise RuntimeError("Could not open CSV file '%s'" % csv_filename)
|
raise RuntimeError("Could not open CSV file '%s'" % csv_filename)
|
||||||
@@ -186,6 +190,66 @@ class CardKeyProviderCsv(CardKeyProvider):
|
|||||||
return None
|
return None
|
||||||
return return_dict
|
return return_dict
|
||||||
|
|
||||||
|
class CardKeyProviderPgsql(CardKeyProvider):
|
||||||
|
"""Card key provider implementation that allows to query against a specified PostgreSQL database table."""
|
||||||
|
|
||||||
|
def __init__(self, config_filename: str, transport_keys: dict):
|
||||||
|
"""
|
||||||
|
Args:
|
||||||
|
config_filename : file name (path) of CSV file containing card-individual key/data
|
||||||
|
transport_keys : (see class CardKeyFieldCryptor)
|
||||||
|
"""
|
||||||
|
log.info("Using SQL database as card key data source: %s" % config_filename)
|
||||||
|
with open(config_filename, "r") as cfg:
|
||||||
|
config = yaml.load(cfg, Loader=yaml.FullLoader)
|
||||||
|
log.info("Card key database name: %s" % config.get('db_name'))
|
||||||
|
db_users = config.get('db_users')
|
||||||
|
user = db_users.get('reader')
|
||||||
|
if user is None:
|
||||||
|
raise ValueError("user for role 'reader' not set up in config file.")
|
||||||
|
self.conn = psycopg2.connect(dbname=config.get('db_name'),
|
||||||
|
user=user.get('name'),
|
||||||
|
password=user.get('pass'),
|
||||||
|
host=config.get('host'))
|
||||||
|
self.tables = config.get('table_names')
|
||||||
|
log.info("Card key database tables: %s" % str(self.tables))
|
||||||
|
self.crypt = CardKeyFieldCryptor(transport_keys)
|
||||||
|
|
||||||
|
def get(self, fields: List[str], key: str, value: str) -> Dict[str, str]:
|
||||||
|
db_result = None
|
||||||
|
for t in self.tables:
|
||||||
|
self.conn.rollback()
|
||||||
|
cur = self.conn.cursor()
|
||||||
|
|
||||||
|
# Make sure that the database table and the key column actually exists. If not, move on to the next table
|
||||||
|
cur.execute("SELECT column_name FROM information_schema.columns where table_name = %s;", (t,))
|
||||||
|
cols_result = cur.fetchall()
|
||||||
|
if cols_result == []:
|
||||||
|
log.warning("Card Key database seems to lack table %s, check config file!" % t)
|
||||||
|
continue
|
||||||
|
if (key.lower(),) not in cols_result:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Query requested columns from database table
|
||||||
|
query = SQL("SELECT {}").format(Identifier(fields[0].lower()))
|
||||||
|
for f in fields[1:]:
|
||||||
|
query += SQL(", {}").format(Identifier(f.lower()))
|
||||||
|
query += SQL(" FROM {} WHERE {} = %s LIMIT 1;").format(Identifier(t.lower()),
|
||||||
|
Identifier(key.lower()))
|
||||||
|
cur.execute(query, (value,))
|
||||||
|
db_result = cur.fetchone()
|
||||||
|
cur.close()
|
||||||
|
|
||||||
|
if db_result:
|
||||||
|
break
|
||||||
|
|
||||||
|
if db_result is None:
|
||||||
|
return None
|
||||||
|
result = dict(zip(fields, db_result))
|
||||||
|
|
||||||
|
for k in result.keys():
|
||||||
|
result[k] = self.crypt.decrypt_field(k, result.get(k))
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
def card_key_provider_register(provider: CardKeyProvider, provider_list=card_key_providers):
|
def card_key_provider_register(provider: CardKeyProvider, provider_list=card_key_providers):
|
||||||
|
|||||||
@@ -183,7 +183,7 @@ class File:
|
|||||||
self.file_type = template.file_type
|
self.file_type = template.file_type
|
||||||
self.fid = template.fid
|
self.fid = template.fid
|
||||||
self.sfi = template.sfi
|
self.sfi = template.sfi
|
||||||
self.arr = template.arr.to_bytes(1)
|
self.arr = template.arr.to_bytes(1, 'big')
|
||||||
if hasattr(template, 'rec_len'):
|
if hasattr(template, 'rec_len'):
|
||||||
self.rec_len = template.rec_len
|
self.rec_len = template.rec_len
|
||||||
else:
|
else:
|
||||||
@@ -227,7 +227,7 @@ class File:
|
|||||||
fileDescriptor['shortEFID'] = bytes([self.sfi])
|
fileDescriptor['shortEFID'] = bytes([self.sfi])
|
||||||
if self.df_name:
|
if self.df_name:
|
||||||
fileDescriptor['dfName'] = self.df_name
|
fileDescriptor['dfName'] = self.df_name
|
||||||
if self.arr and self.arr != self.template.arr.to_bytes(1):
|
if self.arr and self.arr != self.template.arr.to_bytes(1, 'big'):
|
||||||
fileDescriptor['securityAttributesReferenced'] = self.arr
|
fileDescriptor['securityAttributesReferenced'] = self.arr
|
||||||
if self.file_type in ['LF', 'CY']:
|
if self.file_type in ['LF', 'CY']:
|
||||||
fdb_dec['file_type'] = 'working_ef'
|
fdb_dec['file_type'] = 'working_ef'
|
||||||
@@ -264,7 +264,7 @@ class File:
|
|||||||
if self.read_and_update_when_deact:
|
if self.read_and_update_when_deact:
|
||||||
spfi |= 0x40 # TS 102 222 Table 5
|
spfi |= 0x40 # TS 102 222 Table 5
|
||||||
if spfi != 0x00:
|
if spfi != 0x00:
|
||||||
pefi['specialFileInformation'] = spfi.to_bytes(1)
|
pefi['specialFileInformation'] = spfi.to_bytes(1, 'big')
|
||||||
if self.fill_pattern:
|
if self.fill_pattern:
|
||||||
if not self.fill_pattern_repeat:
|
if not self.fill_pattern_repeat:
|
||||||
pefi['fillPattern'] = self.fill_pattern
|
pefi['fillPattern'] = self.fill_pattern
|
||||||
@@ -985,9 +985,9 @@ class SecurityDomainKey:
|
|||||||
self.key_components = key_components
|
self.key_components = key_components
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return 'SdKey(KVN=0x%02x, ID=0x%02x, Usage=%s, Comp=%s)' % (self.key_version_number,
|
return 'SdKey(KVN=0x%02x, ID=0x%02x, Usage=0x%x, Comp=%s)' % (self.key_version_number,
|
||||||
self.key_identifier,
|
self.key_identifier,
|
||||||
self.key_usage_qualifier,
|
build_construct(KeyUsageQualifier, self.key_usage_qualifier)[0],
|
||||||
repr(self.key_components))
|
repr(self.key_components))
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -108,7 +108,10 @@ class PySimLogger:
|
|||||||
formatted_message = logging.Formatter.format(PySimLogger.__formatter, record)
|
formatted_message = logging.Formatter.format(PySimLogger.__formatter, record)
|
||||||
color = PySimLogger.colors.get(record.levelno)
|
color = PySimLogger.colors.get(record.levelno)
|
||||||
if color:
|
if color:
|
||||||
PySimLogger.print_callback(style(formatted_message, fg = color))
|
if type(color) is str:
|
||||||
|
PySimLogger.print_callback(color + formatted_message + "\033[0m")
|
||||||
|
else:
|
||||||
|
PySimLogger.print_callback(style(formatted_message, fg = color))
|
||||||
else:
|
else:
|
||||||
PySimLogger.print_callback(formatted_message)
|
PySimLogger.print_callback(formatted_message)
|
||||||
|
|
||||||
|
|||||||
@@ -477,11 +477,15 @@ class RuntimeLchan:
|
|||||||
|
|
||||||
def get_file_for_filename(self, name: str):
|
def get_file_for_filename(self, name: str):
|
||||||
"""Get the related CardFile object for a specified filename."""
|
"""Get the related CardFile object for a specified filename."""
|
||||||
|
if is_hex(name):
|
||||||
|
name = name.lower()
|
||||||
sels = self.selected_file.get_selectables()
|
sels = self.selected_file.get_selectables()
|
||||||
return sels[name]
|
return sels[name]
|
||||||
|
|
||||||
def activate_file(self, name: str):
|
def activate_file(self, name: str):
|
||||||
"""Request ACTIVATE FILE of specified file."""
|
"""Request ACTIVATE FILE of specified file."""
|
||||||
|
if is_hex(name):
|
||||||
|
name = name.lower()
|
||||||
sels = self.selected_file.get_selectables()
|
sels = self.selected_file.get_selectables()
|
||||||
f = sels[name]
|
f = sels[name]
|
||||||
data, sw = self.scc.activate_file(f.fid)
|
data, sw = self.scc.activate_file(f.fid)
|
||||||
|
|||||||
@@ -750,7 +750,7 @@ class EF_ARR(LinFixedEF):
|
|||||||
@cmd2.with_argparser(LinFixedEF.ShellCommands.read_rec_dec_parser)
|
@cmd2.with_argparser(LinFixedEF.ShellCommands.read_rec_dec_parser)
|
||||||
def do_read_arr_record(self, opts):
|
def do_read_arr_record(self, opts):
|
||||||
"""Read one EF.ARR record in flattened, human-friendly form."""
|
"""Read one EF.ARR record in flattened, human-friendly form."""
|
||||||
(data, _sw) = self._cmd.lchan.read_record_dec(opts.record_nr)
|
(data, _sw) = self._cmd.lchan.read_record_dec(opts.RECORD_NR)
|
||||||
data = self._cmd.lchan.selected_file.flatten(data)
|
data = self._cmd.lchan.selected_file.flatten(data)
|
||||||
self._cmd.poutput_json(data, opts.oneline)
|
self._cmd.poutput_json(data, opts.oneline)
|
||||||
|
|
||||||
|
|||||||
@@ -267,11 +267,11 @@ class EF_SMSP(LinFixedEF):
|
|||||||
raise ValueError
|
raise ValueError
|
||||||
def _encode(self, obj, context, path):
|
def _encode(self, obj, context, path):
|
||||||
if obj <= 12*60:
|
if obj <= 12*60:
|
||||||
return obj/5 - 1
|
return obj // 5 - 1
|
||||||
elif obj <= 24*60:
|
elif obj <= 24*60:
|
||||||
return 143 + ((obj - (12 * 60)) // 30)
|
return 143 + ((obj - (12 * 60)) // 30)
|
||||||
elif obj <= 30 * 24 * 60:
|
elif obj <= 30 * 24 * 60:
|
||||||
return 166 + (obj / (24 * 60))
|
return 166 + (obj // (24 * 60))
|
||||||
elif obj <= 63 * 7 * 24 * 60:
|
elif obj <= 63 * 7 * 24 * 60:
|
||||||
return 192 + (obj // (7 * 24 * 60))
|
return 192 + (obj // (7 * 24 * 60))
|
||||||
else:
|
else:
|
||||||
@@ -280,7 +280,7 @@ class EF_SMSP(LinFixedEF):
|
|||||||
def __init__(self, fid='6f42', sfid=None, name='EF.SMSP', desc='Short message service parameters', **kwargs):
|
def __init__(self, fid='6f42', sfid=None, name='EF.SMSP', desc='Short message service parameters', **kwargs):
|
||||||
super().__init__(fid, sfid=sfid, name=name, desc=desc, rec_len=(28, None), **kwargs)
|
super().__init__(fid, sfid=sfid, name=name, desc=desc, rec_len=(28, None), **kwargs)
|
||||||
ScAddr = Struct('length'/Int8ub, 'ton_npi'/TonNpi, 'call_number'/BcdAdapter(Rpad(Bytes(10))))
|
ScAddr = Struct('length'/Int8ub, 'ton_npi'/TonNpi, 'call_number'/BcdAdapter(Rpad(Bytes(10))))
|
||||||
self._construct = Struct('alpha_id'/COptional(GsmStringAdapter(Rpad(Bytes(this._.total_len-28)))),
|
self._construct = Struct('alpha_id'/COptional(GsmOrUcs2Adapter(Rpad(Bytes(this._.total_len-28)))),
|
||||||
'parameter_indicators'/InvertAdapter(FlagsEnum(Byte, tp_dest_addr=1, tp_sc_addr=2,
|
'parameter_indicators'/InvertAdapter(FlagsEnum(Byte, tp_dest_addr=1, tp_sc_addr=2,
|
||||||
tp_pid=3, tp_dcs=4, tp_vp=5)),
|
tp_pid=3, tp_dcs=4, tp_vp=5)),
|
||||||
'tp_dest_addr'/ScAddr,
|
'tp_dest_addr'/ScAddr,
|
||||||
|
|||||||
@@ -15,3 +15,4 @@ git+https://github.com/osmocom/asn1tools
|
|||||||
packaging
|
packaging
|
||||||
git+https://github.com/hologram-io/smpp.pdu
|
git+https://github.com/hologram-io/smpp.pdu
|
||||||
smpp.twisted3 @ git+https://github.com/jookies/smpp.twisted
|
smpp.twisted3 @ git+https://github.com/jookies/smpp.twisted
|
||||||
|
psycopg2-binary
|
||||||
|
|||||||
3
setup.py
3
setup.py
@@ -21,7 +21,7 @@ setup(
|
|||||||
"pyscard",
|
"pyscard",
|
||||||
"pyserial",
|
"pyserial",
|
||||||
"pytlv",
|
"pytlv",
|
||||||
"cmd2 >= 1.5.0",
|
"cmd2 >= 1.5.0, < 3.0",
|
||||||
"jsonpath-ng",
|
"jsonpath-ng",
|
||||||
"construct >= 2.10.70",
|
"construct >= 2.10.70",
|
||||||
"bidict",
|
"bidict",
|
||||||
@@ -34,6 +34,7 @@ setup(
|
|||||||
"smpp.pdu @ git+https://github.com/hologram-io/smpp.pdu",
|
"smpp.pdu @ git+https://github.com/hologram-io/smpp.pdu",
|
||||||
"asn1tools",
|
"asn1tools",
|
||||||
"smpp.twisted3 @ git+https://github.com/jookies/smpp.twisted",
|
"smpp.twisted3 @ git+https://github.com/jookies/smpp.twisted",
|
||||||
|
"psycopg2-binary"
|
||||||
],
|
],
|
||||||
scripts=[
|
scripts=[
|
||||||
'pySim-prog.py',
|
'pySim-prog.py',
|
||||||
|
|||||||
Reference in New Issue
Block a user