mirror of
https://github.com/DJ2LS/FreeDATA
synced 2024-05-14 08:04:33 +00:00
Merge branch 'develop' into dependabot/github_actions/develop/actions/setup-python-5
This commit is contained in:
commit
a37498d84e
|
@ -1,6 +1,7 @@
|
|||
# FreeDATA - Protocols
|
||||
|
||||
## ARQ Sessions
|
||||
|
||||
An ARQ Session represents a reliable data transmission session from a sending station (A) to a receiving station (B). It uses automatic repeat request on top of different codec2 modes according to the transmission channel conditions.
|
||||
|
||||
So let's say A wants to send some data to B. A typical scenario would look like this:
|
||||
|
@ -22,22 +23,19 @@ ISS->(1)IRS:BURST (ID, offset, payload),(ID, offset, payload),(ID, offset, paylo
|
|||
IRS->(1)ISS:DATA ACK NACK (ID, next_offset, speed level, frames, snr)
|
||||
```
|
||||
|
||||
|
||||
### Frame details
|
||||
|
||||
|
||||
#### SESSION_OPEN_REQ
|
||||
|
||||
ISS sends this first
|
||||
|
||||
DATAC13 Mode (12 bytes)
|
||||
|
||||
|field|bytes|
|
||||
|-|-|
|
||||
|session id|1|
|
||||
|origin|6|
|
||||
|destination_crc|3|
|
||||
|
||||
| field | bytes |
|
||||
| --------------- | ----- |
|
||||
| session id | 1 |
|
||||
| origin | 6 |
|
||||
| destination_crc | 3 |
|
||||
|
||||
#### SESSION_OPEN_ACK
|
||||
|
||||
|
@ -45,14 +43,13 @@ Sent by the IRS in response to a SESSION_OPEN_REQ
|
|||
|
||||
DATAC13 Mode (12 bytes)
|
||||
|
||||
|field|bytes|
|
||||
|-|-|
|
||||
|session id|1|
|
||||
|origin|6|
|
||||
|destination_crc|3|
|
||||
|protocol version|1|
|
||||
|snr|1|
|
||||
|
||||
| field | bytes |
|
||||
| ---------------- | ----- |
|
||||
| session id | 1 |
|
||||
| origin | 6 |
|
||||
| destination_crc | 3 |
|
||||
| protocol version | 1 |
|
||||
| snr | 1 |
|
||||
|
||||
#### SESSION_INFO
|
||||
|
||||
|
@ -60,13 +57,12 @@ ISS sends this in response to a SESSION_OPEN_ACK
|
|||
|
||||
DATAC13 Mode (12 bytes)
|
||||
|
||||
|field|bytes|
|
||||
|-|-|
|
||||
|session id|1|
|
||||
|total bytes|4|
|
||||
|total crc|4|
|
||||
|snr|1|
|
||||
|
||||
| field | bytes |
|
||||
| ----------- | ----- |
|
||||
| session id | 1 |
|
||||
| total bytes | 4 |
|
||||
| total crc | 4 |
|
||||
| snr | 1 |
|
||||
|
||||
#### SESSION_INFO_ACK
|
||||
|
||||
|
@ -74,14 +70,13 @@ IRS sends this in response to a SESSION_INFO
|
|||
|
||||
DATAC13 Mode (12 bytes)
|
||||
|
||||
|field|bytes|
|
||||
|-|-|
|
||||
|session id|1|
|
||||
|total crc|4|
|
||||
|snr|1|
|
||||
|speed level|1|
|
||||
|frames per burst|1|
|
||||
|
||||
| field | bytes |
|
||||
| ---------------- | ----- |
|
||||
| session id | 1 |
|
||||
| total crc | 4 |
|
||||
| snr | 1 |
|
||||
| speed level | 1 |
|
||||
| frames per burst | 1 |
|
||||
|
||||
#### Data Burst
|
||||
|
||||
|
@ -92,50 +87,49 @@ Mode according to handshake speed level
|
|||
Frames per burst according to handshake
|
||||
|
||||
##### Modulation
|
||||
|
||||
Each burst is composed of frames_per_burst frames:
|
||||
|
||||
|preamble|f1|f2|f3|...|postamble|
|
||||
|
||||
##### Each data frame
|
||||
|
||||
|field|bytes|
|
||||
|-|-|
|
||||
|session id|1|
|
||||
|offset|4|
|
||||
|payload|(the remaining payload length)|
|
||||
|
||||
| field | bytes |
|
||||
| ---------- | ------------------------------ |
|
||||
| session id | 1 |
|
||||
| offset | 4 |
|
||||
| payload | (the remaining payload length) |
|
||||
|
||||
#### DATA_BURST_ACK
|
||||
|
||||
Sent by the IRS following successful decoding of burst.
|
||||
|
||||
|field|bytes|
|
||||
|-|-|
|
||||
|session id|1|
|
||||
|next offset|4|
|
||||
|next speed level|1|
|
||||
|next frames per burst|1|
|
||||
|snr|1|
|
||||
|
||||
| field | bytes |
|
||||
| --------------------- | ----- |
|
||||
| session id | 1 |
|
||||
| next offset | 4 |
|
||||
| next speed level | 1 |
|
||||
| next frames per burst | 1 |
|
||||
| snr | 1 |
|
||||
|
||||
#### DATA_BURST_NACK
|
||||
|
||||
Sent by the IRS following unsuccessful decoding of burst or timeout.
|
||||
|
||||
|field|bytes|
|
||||
|-|-|
|
||||
|session id|1|
|
||||
|next offset|4|
|
||||
|next speed level|1|
|
||||
|next frames per burst|1|
|
||||
|snr|1|
|
||||
| field | bytes |
|
||||
| --------------------- | ----- |
|
||||
| session id | 1 |
|
||||
| next offset | 4 |
|
||||
| next speed level | 1 |
|
||||
| next frames per burst | 1 |
|
||||
| snr | 1 |
|
||||
|
||||
#### DATA ACK NACK
|
||||
|
||||
Sent by the IRS after receiving data, together with state information.
|
||||
|
||||
| field |bytes|
|
||||
|------------|-|
|
||||
| session id |1|
|
||||
| state |1|
|
||||
| snr |1|
|
||||
| field | bytes |
|
||||
| ---------- | ----- |
|
||||
| session id | 1 |
|
||||
| state | 1 |
|
||||
| snr | 1 |
|
||||
|
|
|
@ -19,22 +19,8 @@
|
|||
"files": [
|
||||
"dist",
|
||||
"dist-electron",
|
||||
"../modem/server.dist/",
|
||||
],
|
||||
|
||||
"extraResources": [
|
||||
|
||||
{
|
||||
"from": "../modem/server.dist/",
|
||||
"to": "modem",
|
||||
"filter": [
|
||||
"**/*",
|
||||
"!**/.git"
|
||||
]
|
||||
|
||||
}
|
||||
],
|
||||
|
||||
|
||||
"mac": {
|
||||
"target": [
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
"name": "FreeDATA",
|
||||
"description": "FreeDATA",
|
||||
"private": true,
|
||||
"version": "0.11.1-alpha.3",
|
||||
"version": "0.12.1-alpha",
|
||||
"main": "dist-electron/main/index.js",
|
||||
"scripts": {
|
||||
"start": "vite",
|
||||
|
@ -80,7 +80,7 @@
|
|||
"eslint-plugin-promise": "6.1.1",
|
||||
"eslint-plugin-vue": "9.20.1",
|
||||
"typescript": "5.3.3",
|
||||
"vite": "5.0.10",
|
||||
"vite": "5.0.12",
|
||||
"vite-plugin-electron": "0.28.0",
|
||||
"vite-plugin-electron-renderer": "0.14.5",
|
||||
"vitest": "1.0.2",
|
||||
|
|
|
@ -14,6 +14,7 @@ import { useStateStore } from "../store/stateStore.js";
|
|||
const state = useStateStore(pinia);
|
||||
|
||||
import { settingsStore as settings } from "../store/settingsStore.js";
|
||||
import { getAppDataPath } from "../js/freedata";
|
||||
|
||||
import { displayToast } from "./popupHandler.js";
|
||||
|
||||
|
@ -99,34 +100,8 @@ PouchDB.plugin(require("pouchdb-find"));
|
|||
//PouchDB.plugin(require('pouchdb-replication'));
|
||||
PouchDB.plugin(require("pouchdb-upsert"));
|
||||
|
||||
// https://stackoverflow.com/a/26227660
|
||||
if (typeof process.env["APPDATA"] !== "undefined") {
|
||||
var appDataFolder = process.env["APPDATA"];
|
||||
console.log(appDataFolder);
|
||||
} else {
|
||||
var appDataFolder: string;
|
||||
|
||||
switch (process.platform) {
|
||||
case "darwin":
|
||||
appDataFolder = process.env["HOME"] + "/Library/Application Support";
|
||||
console.log(appDataFolder);
|
||||
break;
|
||||
case "linux":
|
||||
appDataFolder = process.env["HOME"] + "/.config";
|
||||
console.log(appDataFolder);
|
||||
break;
|
||||
case "win32":
|
||||
appDataFolder = "undefined";
|
||||
break;
|
||||
default:
|
||||
appDataFolder = "undefined";
|
||||
break;
|
||||
}
|
||||
}
|
||||
console.log("loading chat database...");
|
||||
console.log("appdata folder:" + appDataFolder);
|
||||
var configFolder = path.join(appDataFolder, "FreeDATA");
|
||||
console.log("config folder:" + configFolder);
|
||||
var appDataPath = getAppDataPath();
|
||||
var configFolder = path.join(appDataPath, "FreeDATA");
|
||||
|
||||
var chatDB = path.join(configFolder, "chatDB");
|
||||
console.log("database path:" + chatDB);
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
const os = require("os");
|
||||
const path = require("path");
|
||||
|
||||
/**
|
||||
* Binary to ASCII replacement
|
||||
* @param {string} data in normal/usual utf-8 format
|
||||
|
@ -97,3 +100,31 @@ export function validateCallsignWithoutSSID(callsign: string) {
|
|||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
export function getAppDataPath() {
|
||||
const platform = os.platform();
|
||||
let appDataPath;
|
||||
|
||||
// Check if running in GitHub Actions
|
||||
const isGitHubActions = process.env.GITHUB_ACTIONS === "true";
|
||||
if (isGitHubActions) {
|
||||
return "/home/runner/work/FreeDATA/FreeDATA/gui/config";
|
||||
}
|
||||
|
||||
switch (platform) {
|
||||
case "darwin": // macOS
|
||||
appDataPath = path.join(os.homedir(), "Library", "Application Support");
|
||||
break;
|
||||
case "win32": // Windows
|
||||
appDataPath =
|
||||
process.env.APPDATA || path.join(os.homedir(), "AppData", "Roaming");
|
||||
break;
|
||||
case "linux": // Linux
|
||||
appDataPath = path.join(os.homedir(), ".config");
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unsupported platform");
|
||||
}
|
||||
|
||||
return appDataPath;
|
||||
}
|
||||
|
|
|
@ -1,9 +1,27 @@
|
|||
import { reactive, ref, watch } from "vue";
|
||||
|
||||
import { getConfig, setConfig } from "../js/api";
|
||||
import { getAppDataPath } from "../js/freedata";
|
||||
import fs from "fs";
|
||||
const path = require("path");
|
||||
const nconf = require("nconf");
|
||||
|
||||
var nconf = require("nconf");
|
||||
nconf.file({ file: "config/config.json" });
|
||||
var appDataPath = getAppDataPath();
|
||||
var configFolder = path.join(appDataPath, "FreeDATA");
|
||||
let configFile = "config.json";
|
||||
|
||||
const isGitHubActions = process.env.GITHUB_ACTIONS === "true";
|
||||
if (isGitHubActions) {
|
||||
configFile = "example.json";
|
||||
configFolder = appDataPath;
|
||||
}
|
||||
|
||||
var configPath = path.join(configFolder, configFile);
|
||||
|
||||
console.log("AppData Path:", appDataPath);
|
||||
console.log(configFolder);
|
||||
console.log(configPath);
|
||||
|
||||
nconf.file({ file: configPath });
|
||||
|
||||
// +++
|
||||
//GUI DEFAULT SETTINGS........
|
||||
|
|
83
modem/arq_data_type_handler.py
Normal file
83
modem/arq_data_type_handler.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
# File: arq_data_type_handler.py
|
||||
|
||||
import structlog
|
||||
import lzma
|
||||
import gzip
|
||||
|
||||
class ARQDataTypeHandler:
    """Encode/decode ARQ payloads for a set of named endpoints.

    Each endpoint pairs a 'prepare' callback (run on the sending side before
    transmission) with a 'handle' callback (run on the receiving side after
    reassembly). The endpoint is identified on the air by a single type byte,
    which is simply the endpoint's insertion index in ``self.handlers``.
    """

    def __init__(self):
        self.logger = structlog.get_logger(type(self).__name__)
        # NOTE: insertion order defines the on-air type byte
        # (0 = "raw", 1 = "raw_lzma", 2 = "raw_gzip", 3 = "p2pmsg_lzma"),
        # so new endpoints must only ever be appended, never reordered.
        self.handlers = {
            "raw": {
                'prepare': self.prepare_raw,
                'handle': self.handle_raw
            },
            "raw_lzma": {
                'prepare': self.prepare_raw_lzma,
                'handle': self.handle_raw_lzma
            },
            "raw_gzip": {
                'prepare': self.prepare_raw_gzip,
                'handle': self.handle_raw_gzip
            },
            "p2pmsg_lzma": {
                'prepare': self.prepare_p2pmsg_lzma,
                'handle': self.handle_p2pmsg_lzma
            },
        }

    def dispatch(self, type_byte: int, data: bytearray):
        """Run the 'handle' callback for the endpoint selected by *type_byte*.

        Returns the handled (e.g. decompressed) data, or None after logging a
        warning when *type_byte* does not map to a known endpoint.
        """
        endpoint_names = list(self.handlers)
        # type_byte is received over the radio link (untrusted), so bounds-check
        # it instead of letting an out-of-range index raise IndexError.
        if 0 <= type_byte < len(endpoint_names):
            endpoint_name = endpoint_names[type_byte]
            if 'handle' in self.handlers[endpoint_name]:
                return self.handlers[endpoint_name]['handle'](data)
        self.log(f"Unknown handling endpoint: {type_byte}", isWarning=True)

    def prepare(self, data: bytearray, endpoint_name="raw" ):
        """Run the 'prepare' callback for *endpoint_name*.

        Returns a tuple ``(prepared_data, type_byte)`` where type_byte is the
        endpoint's index, or None after logging a warning for an unknown name.
        """
        if endpoint_name in self.handlers and 'prepare' in self.handlers[endpoint_name]:
            return self.handlers[endpoint_name]['prepare'](data), list(self.handlers.keys()).index(endpoint_name)
        self.log(f"Unknown preparation endpoint: {endpoint_name}", isWarning=True)

    def log(self, message, isWarning=False):
        """Log *message* prefixed with the class name; warning level if requested."""
        msg = f"[{type(self).__name__}]: {message}"
        logger = self.logger.warn if isWarning else self.logger.info
        logger(msg)

    def prepare_raw(self, data):
        """Pass-through preparation: transmit *data* uncompressed."""
        self.log(f"Preparing uncompressed data: {len(data)} Bytes")
        return data

    def handle_raw(self, data):
        """Pass-through handling: return received *data* unchanged."""
        self.log(f"Handling uncompressed data: {len(data)} Bytes")
        return data

    def prepare_raw_lzma(self, data):
        """Compress *data* with LZMA before transmission."""
        compressed_data = lzma.compress(data)
        self.log(f"Preparing LZMA compressed data: {len(data)} Bytes >>> {len(compressed_data)} Bytes")
        return compressed_data

    def handle_raw_lzma(self, data):
        """Decompress received LZMA *data*."""
        decompressed_data = lzma.decompress(data)
        self.log(f"Handling LZMA compressed data: {len(decompressed_data)} Bytes from {len(data)} Bytes")
        return decompressed_data

    def prepare_raw_gzip(self, data):
        """Compress *data* with GZIP before transmission."""
        compressed_data = gzip.compress(data)
        self.log(f"Preparing GZIP compressed data: {len(data)} Bytes >>> {len(compressed_data)} Bytes")
        return compressed_data

    def handle_raw_gzip(self, data):
        """Decompress received GZIP *data*."""
        decompressed_data = gzip.decompress(data)
        self.log(f"Handling GZIP compressed data: {len(decompressed_data)} Bytes from {len(data)} Bytes")
        return decompressed_data

    def prepare_p2pmsg_lzma(self, data):
        """Compress a P2P message payload with LZMA before transmission."""
        compressed_data = lzma.compress(data)
        self.log(f"Preparing LZMA compressed P2PMSG data: {len(data)} Bytes >>> {len(compressed_data)} Bytes")
        return compressed_data

    def handle_p2pmsg_lzma(self, data):
        """Decompress a received LZMA-compressed P2P message payload."""
        decompressed_data = lzma.decompress(data)
        self.log(f"Handling LZMA compressed P2PMSG data: {len(decompressed_data)} Bytes from {len(data)} Bytes")
        return decompressed_data
|
|
@ -5,6 +5,8 @@ import structlog
|
|||
from event_manager import EventManager
|
||||
from modem_frametypes import FRAME_TYPE
|
||||
import time
|
||||
from arq_data_type_handler import ARQDataTypeHandler
|
||||
|
||||
|
||||
class ARQSession():
|
||||
|
||||
|
@ -44,6 +46,7 @@ class ARQSession():
|
|||
self.frame_factory = data_frame_factory.DataFrameFactory(self.config)
|
||||
self.event_frame_received = threading.Event()
|
||||
|
||||
self.arq_data_type_handler = ARQDataTypeHandler()
|
||||
self.id = None
|
||||
self.session_started = time.time()
|
||||
self.session_ended = 0
|
||||
|
@ -88,10 +91,13 @@ class ARQSession():
|
|||
if self.state in self.STATE_TRANSITION:
|
||||
if frame_type in self.STATE_TRANSITION[self.state]:
|
||||
action_name = self.STATE_TRANSITION[self.state][frame_type]
|
||||
getattr(self, action_name)(frame)
|
||||
received_data, type_byte = getattr(self, action_name)(frame)
|
||||
if isinstance(received_data, bytearray) and isinstance(type_byte, int):
|
||||
self.arq_data_type_handler.dispatch(type_byte, received_data)
|
||||
|
||||
return
|
||||
|
||||
self.log(f"Ignoring unknow transition from state {self.state.name} with frame {frame['frame_type']}")
|
||||
self.log(f"Ignoring unknown transition from state {self.state.name} with frame {frame['frame_type']}")
|
||||
|
||||
def is_session_outdated(self):
|
||||
session_alivetime = time.time() - self.session_max_age
|
||||
|
|
|
@ -5,6 +5,7 @@ from modem_frametypes import FRAME_TYPE
|
|||
from codec2 import FREEDV_MODE
|
||||
from enum import Enum
|
||||
import time
|
||||
|
||||
class IRS_State(Enum):
|
||||
NEW = 0
|
||||
OPEN_ACK_SENT = 1
|
||||
|
@ -68,6 +69,7 @@ class ARQSessionIRS(arq_session.ARQSession):
|
|||
self.state = IRS_State.NEW
|
||||
self.state_enum = IRS_State # needed for access State enum from outside
|
||||
|
||||
self.type_byte = None
|
||||
self.total_length = 0
|
||||
self.total_crc = ''
|
||||
self.received_data = None
|
||||
|
@ -114,6 +116,7 @@ class ARQSessionIRS(arq_session.ARQSession):
|
|||
self.launch_transmit_and_wait(ack_frame, self.TIMEOUT_CONNECT, mode=FREEDV_MODE.signalling)
|
||||
if not self.abort:
|
||||
self.set_state(IRS_State.OPEN_ACK_SENT)
|
||||
return None, None
|
||||
|
||||
def send_info_ack(self, info_frame):
|
||||
# Get session info from ISS
|
||||
|
@ -121,6 +124,7 @@ class ARQSessionIRS(arq_session.ARQSession):
|
|||
self.total_length = info_frame['total_length']
|
||||
self.total_crc = info_frame['total_crc']
|
||||
self.dx_snr.append(info_frame['snr'])
|
||||
self.type_byte = info_frame['type']
|
||||
|
||||
self.log(f"New transfer of {self.total_length} bytes")
|
||||
self.event_manager.send_arq_session_new(False, self.id, self.dxcall, self.total_length, self.state.name)
|
||||
|
@ -134,7 +138,7 @@ class ARQSessionIRS(arq_session.ARQSession):
|
|||
self.launch_transmit_and_wait(info_ack, self.TIMEOUT_CONNECT, mode=FREEDV_MODE.signalling)
|
||||
if not self.abort:
|
||||
self.set_state(IRS_State.INFO_ACK_SENT)
|
||||
|
||||
return None, None
|
||||
|
||||
def process_incoming_data(self, frame):
|
||||
if frame['offset'] != self.received_bytes:
|
||||
|
@ -174,7 +178,7 @@ class ARQSessionIRS(arq_session.ARQSession):
|
|||
# self.transmitted_acks += 1
|
||||
self.set_state(IRS_State.BURST_REPLY_SENT)
|
||||
self.launch_transmit_and_wait(ack, self.TIMEOUT_DATA, mode=FREEDV_MODE.signalling)
|
||||
return
|
||||
return None, None
|
||||
|
||||
if self.final_crc_matches():
|
||||
self.log("All data received successfully!")
|
||||
|
@ -192,6 +196,7 @@ class ARQSessionIRS(arq_session.ARQSession):
|
|||
self.event_manager.send_arq_session_finished(
|
||||
False, self.id, self.dxcall, True, self.state.name, data=self.received_data, statistics=self.calculate_session_statistics())
|
||||
|
||||
return self.received_data, self.type_byte
|
||||
else:
|
||||
|
||||
ack = self.frame_factory.build_arq_burst_ack(self.id,
|
||||
|
@ -207,7 +212,7 @@ class ARQSessionIRS(arq_session.ARQSession):
|
|||
self.set_state(IRS_State.FAILED)
|
||||
self.event_manager.send_arq_session_finished(
|
||||
False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
|
||||
|
||||
return False, False
|
||||
|
||||
def calibrate_speed_settings(self):
|
||||
self.speed_level = 0 # for now stay at lowest speed level
|
||||
|
@ -230,4 +235,5 @@ class ARQSessionIRS(arq_session.ARQSession):
|
|||
self.launch_transmit_and_wait(stop_ack, self.TIMEOUT_CONNECT, mode=FREEDV_MODE.signalling)
|
||||
self.set_state(IRS_State.ABORTED)
|
||||
self.event_manager.send_arq_session_finished(
|
||||
False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
|
||||
False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
|
||||
return None, None
|
|
@ -53,13 +53,13 @@ class ARQSessionISS(arq_session.ARQSession):
|
|||
}
|
||||
}
|
||||
|
||||
def __init__(self, config: dict, modem, dxcall: str, data: bytearray, state_manager):
|
||||
def __init__(self, config: dict, modem, dxcall: str, state_manager, data: bytearray, type_byte: bytes):
|
||||
super().__init__(config, modem, dxcall)
|
||||
self.state_manager = state_manager
|
||||
self.data = data
|
||||
self.total_length = len(data)
|
||||
self.data_crc = ''
|
||||
|
||||
self.type_byte = type_byte
|
||||
self.confirmed_bytes = 0
|
||||
|
||||
self.state = ISS_State.NEW
|
||||
|
@ -119,11 +119,13 @@ class ARQSessionISS(arq_session.ARQSession):
|
|||
|
||||
info_frame = self.frame_factory.build_arq_session_info(self.id, self.total_length,
|
||||
helpers.get_crc_32(self.data),
|
||||
self.snr[0])
|
||||
self.snr[0], self.type_byte)
|
||||
|
||||
self.launch_twr(info_frame, self.TIMEOUT_CONNECT_ACK, self.RETRIES_CONNECT, mode=FREEDV_MODE.signalling)
|
||||
self.set_state(ISS_State.INFO_SENT)
|
||||
|
||||
return None, None
|
||||
|
||||
def send_data(self, irs_frame):
|
||||
|
||||
self.set_speed_and_frames_per_burst(irs_frame)
|
||||
|
@ -137,15 +139,15 @@ class ARQSessionISS(arq_session.ARQSession):
|
|||
# check if we received an abort flag
|
||||
if irs_frame["flag"]["ABORT"]:
|
||||
self.transmission_aborted(irs_frame)
|
||||
return
|
||||
return None, None
|
||||
|
||||
if irs_frame["flag"]["FINAL"]:
|
||||
if self.confirmed_bytes == self.total_length and irs_frame["flag"]["CHECKSUM"]:
|
||||
self.transmission_ended(irs_frame)
|
||||
return
|
||||
|
||||
else:
|
||||
self.transmission_failed()
|
||||
return
|
||||
return None, None
|
||||
|
||||
payload_size = self.get_data_payload_size()
|
||||
burst = []
|
||||
|
@ -158,6 +160,7 @@ class ARQSessionISS(arq_session.ARQSession):
|
|||
burst.append(data_frame)
|
||||
self.launch_twr(burst, self.TIMEOUT_TRANSFER, self.RETRIES_CONNECT, mode='auto')
|
||||
self.set_state(ISS_State.BURST_SENT)
|
||||
return None, None
|
||||
|
||||
def transmission_ended(self, irs_frame):
|
||||
# final function for sucessfully ended transmissions
|
||||
|
@ -166,6 +169,7 @@ class ARQSessionISS(arq_session.ARQSession):
|
|||
self.log(f"All data transfered! flag_final={irs_frame['flag']['FINAL']}, flag_checksum={irs_frame['flag']['CHECKSUM']}")
|
||||
self.event_manager.send_arq_session_finished(True, self.id, self.dxcall,True, self.state.name, statistics=self.calculate_session_statistics())
|
||||
self.state_manager.remove_arq_iss_session(self.id)
|
||||
return None, None
|
||||
|
||||
def transmission_failed(self, irs_frame=None):
|
||||
# final function for failed transmissions
|
||||
|
@ -173,6 +177,7 @@ class ARQSessionISS(arq_session.ARQSession):
|
|||
self.set_state(ISS_State.FAILED)
|
||||
self.log(f"Transmission failed!")
|
||||
self.event_manager.send_arq_session_finished(True, self.id, self.dxcall,False, self.state.name, statistics=self.calculate_session_statistics())
|
||||
return None, None
|
||||
|
||||
def abort_transmission(self, irs_frame=None):
|
||||
# function for starting the abort sequence
|
||||
|
@ -202,4 +207,5 @@ class ARQSessionISS(arq_session.ARQSession):
|
|||
self.event_manager.send_arq_session_finished(
|
||||
True, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
|
||||
self.state_manager.remove_arq_iss_session(self.id)
|
||||
return None, None
|
||||
|
||||
|
|
|
@ -3,6 +3,8 @@ import queue
|
|||
from codec2 import FREEDV_MODE
|
||||
import structlog
|
||||
from state_manager import StateManager
|
||||
from arq_data_type_handler import ARQDataTypeHandler
|
||||
|
||||
|
||||
class TxCommand():
|
||||
|
||||
|
@ -13,6 +15,7 @@ class TxCommand():
|
|||
self.event_manager = event_manager
|
||||
self.set_params_from_api(apiParams)
|
||||
self.frame_factory = DataFrameFactory(config)
|
||||
self.arq_data_type_handler = ARQDataTypeHandler()
|
||||
|
||||
def set_params_from_api(self, apiParams):
|
||||
pass
|
||||
|
|
|
@ -13,13 +13,20 @@ class ARQRawCommand(TxCommand):
|
|||
if not api_validations.validate_freedata_callsign(self.dxcall):
|
||||
self.dxcall = f"{self.dxcall}-0"
|
||||
|
||||
try:
|
||||
self.type = apiParams['type']
|
||||
except KeyError:
|
||||
self.type = "raw"
|
||||
|
||||
self.data = base64.b64decode(apiParams['data'])
|
||||
|
||||
def run(self, event_queue: Queue, modem):
|
||||
self.emit_event(event_queue)
|
||||
self.logger.info(self.log_message())
|
||||
|
||||
iss = ARQSessionISS(self.config, modem, self.dxcall, self.data, self.state_manager)
|
||||
prepared_data, type_byte = self.arq_data_type_handler.prepare(self.data, self.type)
|
||||
|
||||
iss = ARQSessionISS(self.config, modem, self.dxcall, self.state_manager, prepared_data, type_byte)
|
||||
if iss.id:
|
||||
self.state_manager.register_arq_iss_session(iss)
|
||||
iss.start()
|
||||
|
|
|
@ -15,7 +15,6 @@ class DataFrameFactory:
|
|||
'FINAL': 0, # Bit-position for indicating the FINAL state
|
||||
'ABORT': 1, # Bit-position for indicating the ABORT request
|
||||
'CHECKSUM': 2, # Bit-position for indicating the CHECKSUM is correct or not
|
||||
'ENABLE_COMPRESSION': 3 # Bit-position for indicating compression is enabled
|
||||
}
|
||||
|
||||
def __init__(self, config):
|
||||
|
@ -118,6 +117,7 @@ class DataFrameFactory:
|
|||
"total_crc": 4,
|
||||
"snr": 1,
|
||||
"flag": 1,
|
||||
"type": 1,
|
||||
}
|
||||
|
||||
self.template_list[FR_TYPE.ARQ_SESSION_INFO_ACK.value] = {
|
||||
|
@ -218,7 +218,7 @@ class DataFrameFactory:
|
|||
|
||||
elif key in ["session_id", "speed_level",
|
||||
"frames_per_burst", "version",
|
||||
"offset", "total_length", "state"]:
|
||||
"offset", "total_length", "state", "type"]:
|
||||
extracted_data[key] = int.from_bytes(data, 'big')
|
||||
|
||||
elif key in ["snr"]:
|
||||
|
@ -350,10 +350,8 @@ class DataFrameFactory:
|
|||
}
|
||||
return self.construct(FR_TYPE.ARQ_SESSION_OPEN_ACK, payload)
|
||||
|
||||
def build_arq_session_info(self, session_id: int, total_length: int, total_crc: bytes, snr, flag_compression=False):
|
||||
def build_arq_session_info(self, session_id: int, total_length: int, total_crc: bytes, snr, type):
|
||||
flag = 0b00000000
|
||||
if flag_compression:
|
||||
flag = helpers.set_flag(flag, 'ENABLE_COMPRESSION', True, self.ARQ_FLAGS)
|
||||
|
||||
payload = {
|
||||
"session_id": session_id.to_bytes(1, 'big'),
|
||||
|
@ -361,6 +359,7 @@ class DataFrameFactory:
|
|||
"total_crc": total_crc,
|
||||
"snr": helpers.snr_to_bytes(1),
|
||||
"flag": flag.to_bytes(1, 'big'),
|
||||
"type": type.to_bytes(1, 'big'),
|
||||
|
||||
}
|
||||
return self.construct(FR_TYPE.ARQ_SESSION_INFO, payload)
|
||||
|
@ -377,7 +376,6 @@ class DataFrameFactory:
|
|||
}
|
||||
return self.construct(FR_TYPE.ARQ_STOP_ACK, payload)
|
||||
|
||||
|
||||
def build_arq_session_info_ack(self, session_id, total_crc, snr, speed_level, frames_per_burst, flag_final=False, flag_abort=False):
|
||||
flag = 0b00000000
|
||||
if flag_final:
|
||||
|
|
|
@ -31,7 +31,6 @@ class FrameHandler():
|
|||
def is_frame_for_me(self):
|
||||
call_with_ssid = self.config['STATION']['mycall'] + "-" + str(self.config['STATION']['myssid'])
|
||||
ft = self.details['frame']['frame_type']
|
||||
print(self.details)
|
||||
valid = False
|
||||
# Check for callsign checksum
|
||||
if ft in ['ARQ_SESSION_OPEN', 'ARQ_SESSION_OPEN_ACK', 'PING', 'PING_ACK']:
|
||||
|
|
|
@ -190,7 +190,6 @@ class radio:
|
|||
try:
|
||||
mode, bandwidth = response.split('\n', 1) # Split the response into mode and bandwidth
|
||||
except ValueError:
|
||||
print(response)
|
||||
mode = 'err'
|
||||
bandwidth = 'err'
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ app = Flask(__name__)
|
|||
CORS(app)
|
||||
CORS(app, resources={r"/*": {"origins": "*"}})
|
||||
sock = Sock(app)
|
||||
MODEM_VERSION = "0.12.0-alpha"
|
||||
MODEM_VERSION = "0.12.1-alpha"
|
||||
|
||||
# set config file to use
|
||||
def set_config():
|
||||
|
|
|
@ -126,12 +126,13 @@ class TestARQSession(unittest.TestCase):
|
|||
|
||||
def testARQSessionSmallPayload(self):
|
||||
# set Packet Error Rate (PER) / frame loss probability
|
||||
self.loss_probability = 50
|
||||
self.loss_probability = 0
|
||||
|
||||
self.establishChannels()
|
||||
params = {
|
||||
'dxcall': "XX1XXX-1",
|
||||
'data': base64.b64encode(bytes("Hello world!", encoding="utf-8")),
|
||||
'type': "raw_lzma"
|
||||
}
|
||||
cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)
|
||||
cmd.run(self.iss_event_queue, self.iss_modem)
|
||||
|
@ -146,6 +147,7 @@ class TestARQSession(unittest.TestCase):
|
|||
params = {
|
||||
'dxcall': "XX1XXX-1",
|
||||
'data': base64.b64encode(np.random.bytes(1000)),
|
||||
'type': "raw_lzma"
|
||||
}
|
||||
cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)
|
||||
cmd.run(self.iss_event_queue, self.iss_modem)
|
||||
|
|
37
tests/test_data_type_handler.py
Normal file
37
tests/test_data_type_handler.py
Normal file
|
@ -0,0 +1,37 @@
|
|||
import sys
|
||||
sys.path.append('modem')
|
||||
|
||||
import unittest
|
||||
from arq_data_type_handler import ARQDataTypeHandler
|
||||
|
||||
class TestDispatcher(unittest.TestCase):
    """Round-trip tests for ARQDataTypeHandler prepare/dispatch endpoints."""

    @classmethod
    def setUpClass(cls):
        cls.arq_data_type_handler = ARQDataTypeHandler()

    def _assert_roundtrip(self, endpoint_name):
        # prepare() encodes and yields the on-air type byte; dispatch() must
        # restore the original payload from that pair.
        example_data = b"Hello FreeDATA!"
        formatted_data, type_byte = self.arq_data_type_handler.prepare(example_data, endpoint_name)
        dispatched_data = self.arq_data_type_handler.dispatch(type_byte, formatted_data)
        self.assertEqual(example_data, dispatched_data)

    def testDataTypeHandlerRaw(self):
        self._assert_roundtrip("raw")

    def testDataTypeHandlerLZMA(self):
        self._assert_roundtrip("raw_lzma")

    def testDataTypeHandlerGZIP(self):
        self._assert_roundtrip("raw_gzip")
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
Loading…
Reference in a new issue