moved to sounddevice

changed audio library, changed SSID behavior, minor chat changes
dj2ls 2022-03-24 20:49:13 +01:00
parent 3d24d47ee5
commit 15217b2521
10 changed files with 760 additions and 450 deletions

View file

@ -13,7 +13,8 @@ const exec = require('child_process').spawn;
const log = require('electron-log');
const mainLog = log.scope('main');
const daemonProcessLog = log.scope('freedata-daemon');
const mime = require('mime');
const sysInfo = log.scope('system information');
sysInfo.info("SYSTEM INFORMATION ----------------------------- ");
sysInfo.info("APP VERSION : " + app.getVersion());
@ -466,9 +467,10 @@ ipcMain.on('request-open-tnc-log', (event) => {
//folder selector
ipcMain.on('get-folder-path',(event,data)=>{
dialog.showOpenDialog({defaultPath: path.join(__dirname, '../assets/'),
dialog.showOpenDialog({defaultPath: path.join(__dirname, '../'),
buttonLabel: 'Select folder', properties: ['openDirectory']}).then(folderPaths => {
win.webContents.send('return-folder-paths', {path: folderPaths,})
win.webContents.send('return-folder-paths', {path: folderPaths,})
});
});
@ -477,8 +479,52 @@ ipcMain.on('open-folder',(event,data)=>{
shell.showItemInFolder(data.path)
});
//select file
ipcMain.on('select-file',(event,data)=>{
dialog.showOpenDialog({defaultPath: path.join(__dirname, '../'),
buttonLabel: 'Select file', properties: ['openFile']}).then(filepath => {
console.log(filepath.filePaths[0])
try {
fs.readFile(filepath.filePaths[0], 'utf8', function (err, data) {
//fs.readFile(filepath.filePaths[0], function (err, data) {
var filename = path.basename(filepath.filePaths[0])
var mimeType = mime.getType(filename)
chat.webContents.send('return-selected-files', {data : data, mime: mimeType, filename: filename})
})
} catch (err) {
console.log(err);
}
});
});
//save file to folder
ipcMain.on('save-file-to-folder',(event,data)=>{
console.log(data)
dialog.showSaveDialog({defaultPath: path.join(__dirname, '../')}).then(filepath => {
console.log(filepath.filePath)
try {
fs.writeFile(filepath.filePath, data.file, function (err, data) {
})
} catch (err) {
console.log(err);
}
});
});
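// illustrative note: the renderer-side counterparts of these two handlers live in the chat
// preload further down in this commit - ipcRenderer.send('select-file', {...}) is answered on
// 'return-selected-files', and ipcRenderer.send('save-file-to-folder', {file, filename, filetype})
// feeds the fs.writeFile() call above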
// LISTENER FOR UPDATER EVENTS
autoUpdater.on('update-available', (info) => {

View file

@ -37,6 +37,7 @@
"electron-updater": "^5.0.0",
"emoji-picker-element": "^1.11.0",
"emoji-picker-element-data": "^1.3.0",
"mime": "^3.0.0",
"pouchdb": "^7.2.2",
"pouchdb-find": "^7.2.2",
"qth-locator": "^2.1.0",

View file

@ -2,314 +2,331 @@ const path = require('path')
const {
ipcRenderer
} = require('electron')
const { v4: uuidv4 } = require('uuid');
const {
v4: uuidv4
} = require('uuid');
const utf8 = require('utf8');
// https://stackoverflow.com/a/26227660
var appDataFolder = process.env.APPDATA || (process.platform == 'darwin' ? process.env.HOME + '/Library/Application Support' : process.env.HOME + "/.config")
var configFolder = path.join(appDataFolder, "FreeDATA");
var configPath = path.join(configFolder, 'config.json')
const config = require(configPath);
// set date format
const dateFormat = new Intl.DateTimeFormat('en-GB', {
timeStyle: 'long',
dateStyle: 'full'
});
// set date format information
const dateFormatShort = new Intl.DateTimeFormat('en-GB', {
year: 'numeric',
month: 'numeric',
day: 'numeric',
hour: 'numeric',
minute: 'numeric',
second: 'numeric',
hour12: false,
});
// split character
const split_char = '\0;'
// global for our selected file we want to transmit
var filetype = '';
var file = '';
var filename = '';
var chatDB = path.join(configFolder, 'chatDB')
// ---- MessageDB
var PouchDB = require('pouchdb');
PouchDB.plugin(require('pouchdb-find'));
var db = new PouchDB(chatDB);
// get all messages from database
//var messages = db.get("messages").value()
// get all dxcallsigns in database
var dxcallsigns = new Set();
db.createIndex({
index: {
fields: ['timestamp', 'uuid', 'dxcallsign', 'dxgrid', 'msg', 'checksum', 'type', 'command', 'status']
}
}).then(function (result) {
// handle result
console.log(result)
}).catch(function (err) {
console.log(err);
});
db.find({
selector: {
timestamp: {$exists: true}},
sort: [{'timestamp': 'asc'}]
}).then(function (result) {
// handle result
console.log(result);
console.log(typeof(result));
if(typeof(result) !== 'undefined'){
result.docs.forEach(function(item) {
update_chat(item);
});
index: {
fields: ['timestamp', 'uuid', 'dxcallsign', 'dxgrid', 'msg', 'checksum', 'type', 'command', 'status', '_attachments']
}
}).catch(function (err) {
console.log(err);
}).then(function(result) {
// handle result
console.log(result)
}).catch(function(err) {
console.log(err);
});
db.find({
selector: {
timestamp: {
$exists: true
}
},
sort: [{
'timestamp': 'asc'
}]
}).then(function(result) {
// handle result
if (typeof(result) !== 'undefined') {
result.docs.forEach(function(item) {
console.log(item)
update_chat(item);
});
}
}).catch(function(err) {
console.log(err);
});
// WINDOW LISTENER
window.addEventListener('DOMContentLoaded', () => {
document.querySelector('emoji-picker').addEventListener("emoji-click", (event) => {
document.getElementById('chatModuleMessage').setRangeText(event.detail.emoji.unicode)
console.log(event.detail);
})
document.getElementById("emojipickerbutton").addEventListener("click", () => {
var element = document.getElementById("emojipickercontainer")
console.log(element.style.display);
if (element.style.display === "none") {
element.style.display = "block";
} else {
element.style.display = "none";
}
})
document.querySelector('emoji-picker').addEventListener("emoji-click", (event) => {
document.getElementById('chatModuleMessage').setRangeText(event.detail.emoji.unicode)
console.log(event.detail);
})
document.getElementById("emojipickerbutton").addEventListener("click", () => {
var element = document.getElementById("emojipickercontainer")
console.log(element.style.display);
if (element.style.display === "none") {
element.style.display = "block";
} else {
element.style.display = "none";
}
})
document.getElementById("selectFiles").addEventListener("click", () => {
ipcRenderer.send('select-file', {
title: 'Title',
});
})
// SEND MSG
document.getElementById("sendMessage").addEventListener("click", () => {
document.getElementById('emojipickercontainer').style.display = "none";
var dxcallsign = document.getElementById('chatModuleDxCall').value;
dxcallsign = dxcallsign.toUpperCase();
var chatmessage = document.getElementById('chatModuleMessage').value;
//chatmessage = Buffer.from(chatmessage, 'utf-8').toString();
var uuid = uuidv4();
console.log(chatmessage)
let Data = {
command: "send_message",
dxcallsign : dxcallsign,
mode : 255,
frames : 1,
data : chatmessage,
checksum : '123',
uuid : uuid
};
ipcRenderer.send('run-tnc-command', Data);
document.getElementById('emojipickercontainer').style.display = "none";
var dxcallsign = document.getElementById('chatModuleDxCall').value;
dxcallsign = dxcallsign.toUpperCase();
var chatmessage = document.getElementById('chatModuleMessage').value;
var data_with_attachment = chatmessage + split_char + filename + split_char + filetype + split_char + file;
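// resulting payload layout, with split_char = '\0;' defined above:
//   <message>\0;<filename>\0;<mime type>\0;<raw file data>
// the receive path splits on the same character again, see 'action-new-msg-received' below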
document.getElementById('selectFiles').innerHTML = `
<i class="bi bi-paperclip" style="font-size: 1.2rem; color: white;"></i>
`;
var uuid = uuidv4();
console.log(chatmessage)
let Data = {
command: "send_message",
dxcallsign: dxcallsign,
mode: 255,
frames: 1,
data: data_with_attachment,
checksum: '123',
uuid: uuid
};
ipcRenderer.send('run-tnc-command', Data);
db.post({
_id: uuid,
timestamp: Math.floor(Date.now() / 1000),
dxcallsign: dxcallsign,
dxgrid: 'NULL',
msg: chatmessage,
checksum: 'NULL',
type: "transmit",
status: 'transmit',
uuid: uuid
}).then(function (response) {
// handle response
console.log("new database entry");
console.log(response);
}).catch(function (err) {
console.log(err);
_id: uuid,
timestamp: Math.floor(Date.now() / 1000),
dxcallsign: dxcallsign,
dxgrid: 'NULL',
msg: chatmessage,
checksum: 'NULL',
type: "transmit",
status: 'transmit',
uuid: uuid,
_attachments: {
[filename]: {
content_type: filetype,
data: new Buffer(file)
}
}
}).then(function(response) {
// handle response
console.log("new database entry");
console.log(response);
}).catch(function(err) {
console.log(err);
});
db.get(uuid, {
attachments: true
}).then(function(doc) {
// handle doc
update_chat(doc)
}).catch(function(err) {
console.log(err);
});
db.get(uuid).then(function (doc) {
// handle doc
update_chat(doc)
}).catch(function (err) {
console.log(err);
});
// scroll to bottom
var element = document.getElementById("message-container");
element.scrollTo(0,element.scrollHeight);
element.scrollTo(0, element.scrollHeight);
// clear input
document.getElementById('chatModuleMessage').value = ''
document.getElementById('chatModuleMessage').value = ''
})
// cleanup after transmission
filetype = '';
file = '';
filename = '';
});
ipcRenderer.on('return-selected-files', (event, arg) => {
filetype = arg.mime;
file = arg.data;
filename = arg.filename;
document.getElementById('selectFiles').innerHTML = `
<i class="bi bi-paperclip" style="font-size: 1.2rem; color: white;"></i>
<span class="position-absolute top-0 start-100 translate-middle p-2 bg-danger border border-light rounded-circle">
<span class="visually-hidden">New file selected</span>
</span>
`;
});
ipcRenderer.on('action-update-transmission-status', (event, arg) => {
console.log(arg.status);
console.log(arg.uuid);
db.get(arg.uuid).then(function(doc) {
return db.put({
_id: arg.uuid,
_rev: doc._rev,
timestamp: doc.timestamp,
dxcallsign: doc.dxcallsign,
dxgrid: doc.dxgrid,
msg: doc.msg,
checksum: doc.checksum,
type: "transmit",
status: arg.status,
uuid: doc.uuid
});
}).then(function(response) {
// handle response
db.get(arg.uuid).then(function (doc) {
// handle doc
update_chat(doc);
}).catch(function (err) {
console.log(err);
});
}).catch(function (err) {
console.log(err);
});
});
ipcRenderer.on('action-new-msg-received', (event, arg) => {
console.log(arg);
console.log(arg.data);
var new_msg = arg.data;
new_msg.forEach(function(item) {
console.log(item);
//for (i = 0; i < arg.data.length; i++) {
let obj = new Object();
var encoded_data = atob(item.data);
var splitted_data = encoded_data.split(split_char);
console.log(utf8.decode(splitted_data[3]));
//obj.uuid = item.uuid;
item.command = splitted_data[1];
item.checksum = splitted_data[2];
// convert message to unicode from utf8 because of emojis
item.uuid = utf8.decode(splitted_data[3]);
item.msg = utf8.decode(splitted_data[4]);
//obj.dxcallsign = item.dxcallsign;
//obj.dxgrid = item.dxgrid;
//obj.timestamp = item.timestamp;
item.status = 'null';
// check if message does not already exist in the database.
// this might cause high cpu load if the file is getting too big
/*
if(!JSON.stringify(db.get("messages")).includes(item.uuid)){
console.log("new message: " + item);
db.get("messages").push(item).save();
}
*/
db.put({
_id: item.uuid,
timestamp: item.timestamp,
uuid: item.uuid,
dxcallsign: item.dxcallsign,
dxgrid: item.dxgrid,
msg: item.msg,
checksum: item.checksum,
type : "received",
command : item.command,
status : item.status
}).then(function (response) {
// handle response
console.log("new database entry");
console.log(response);
}).catch(function (err) {
console.log(err);
db.get(arg.uuid, {
attachments: true
}).then(function(doc) {
return db.put({
_id: arg.uuid,
_rev: doc._rev,
timestamp: doc.timestamp,
dxcallsign: doc.dxcallsign,
dxgrid: doc.dxgrid,
msg: doc.msg,
checksum: doc.checksum,
type: "transmit",
status: arg.status,
uuid: doc.uuid
});
db.get(item.uuid).then(function (doc) {
// handle doc
// timestamp
update_chat(doc);
}).catch(function (err) {
console.log(err);
}).then(function(response) {
// handle response
db.get(arg.uuid, {
attachments: true
}).then(function(doc) {
// handle doc
update_chat(doc);
}).catch(function(err) {
console.log(err);
});
console.log("...................................")
return
}).catch(function(err) {
console.log(err);
});
});
ipcRenderer.on('action-new-msg-received', (event, arg) => {
console.log(arg.data)
var new_msg = arg.data;
new_msg.forEach(function(item) {
console.log(item)
let obj = new Object();
if (item.type == 'ping') {
obj.timestamp = item.timestamp;
obj.dxcallsign = item.dxcallsign;
obj.dxgrid = item.dxgrid;
obj.uuid = item.uuid;
obj.command = 'ping';
obj.checksum = 'null';
obj.msg = 'null';
obj.status = item.status;
obj.snr = item.snr;
obj.type = item.type;
} else if (item.type == 'beacon') {
obj.timestamp = item.timestamp;
obj.dxcallsign = item.dxcallsign;
obj.dxgrid = item.dxgrid;
obj.uuid = item.uuid;
obj.command = 'beacon';
obj.checksum = 'null';
obj.msg = 'null';
obj.status = item.status;
obj.snr = item.snr;
obj.type = item.type;
} else if (item.arq == 'received') {
var encoded_data = atob(item.data);
var splitted_data = encoded_data.split(split_char);
console.log(splitted_data)
//console.log(utf8.decode(splitted_data[3]));
// check if message
console.log(splitted_data[0])
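// expected field layout after splitting:
//   [0] 'm' marker, [1] command, [2] checksum, [3] uuid, [4] message,
//   [5] filename, [6] mime type, [7] file data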
obj.timestamp = item.timestamp;
obj.dxcallsign = item.dxcallsign;
obj.dxgrid = item.dxgrid;
//obj.uuid = item.uuid;
obj.command = splitted_data[1];
obj.checksum = splitted_data[2];
// convert message to unicode from utf8 because of emojis
obj.uuid = utf8.decode(splitted_data[3]);
obj.msg = utf8.decode(splitted_data[4]);
obj.status = 'null';
obj.snr = 'null';
obj.type = 'received';
obj.filename = utf8.decode(splitted_data[5]);
obj.filetype = utf8.decode(splitted_data[6]);
obj.file = utf8.decode(splitted_data[7]);
try{
//var file = btoa(obj.file)
} catch (error) {
}
db.put({
_id: obj.uuid,
timestamp: obj.timestamp,
uuid: obj.uuid,
dxcallsign: obj.dxcallsign,
dxgrid: obj.dxgrid,
msg: obj.msg,
checksum: obj.checksum,
type: obj.type,
command: obj.command,
status: obj.status,
snr: obj.snr,
_attachments: {
[obj.filename]: {
content_type: obj.filetype,
data: new Buffer(obj.file)
}
}
}).then(function(response) {
// handle response
console.log("new database entry");
console.log(response);
}).catch(function(err) {
console.log(err);
});
db.get(item.uuid, {
attachments: true
}).then(function(doc) {
// handle doc
// timestamp
console.log(doc)
update_chat(doc);
}).catch(function(err) {
console.log(err);
});
}
});
});
// Update chat list
update_chat = function(obj) {
console.log(obj);
var dxcallsign = obj.dxcallsign;
var timestamp = dateFormat.format(obj.timestamp * 1000);
var dxgrid = obj.dxgrid;
//console.log(obj);
var dxcallsign = obj.dxcallsign;
var timestamp = dateFormat.format(obj.timestamp * 1000);
var timestampShort = dateFormatShort.format(obj.timestamp * 1000);
var dxgrid = obj.dxgrid;
if (typeof(obj._attachments) !== 'undefined') {
//var filename = obj._attachments;
var filename = Object.keys(obj._attachments)[0]
var filetype = filename.split('.')[1]
var filesize = filename.length + " Bytes";
var fileheader = `
<div class="card-header text-end p-0 mb-0">
<p class="text-right mb-0 p-1 text-black" style="text-align: right; font-size : 1rem">
<span class="badge bg-secondary text-white p-1">${filename}</span>
<span class="badge bg-secondary text-white p-1">${filesize}</span>
<i class="bi bi-filetype-${filetype}" style="font-size: 3rem; color: black;"></i>
<button type="button btn-sm" id="save-file-msg-${obj._id}" class="btn btn-light"><i class="bi bi-box-arrow-in-down" style="font-size: 0.9rem; color: black;"></i></button>
</p>
</div>
`;
} else {
var filename = ''
var fileheader = ''
}
// CALLSIGN LIST
if(!(document.getElementById('chat-' + dxcallsign + '-list'))){
if (!(document.getElementById('chat-' + dxcallsign + '-list'))) {
var new_callsign = `
<a class="list-group-item list-group-item-action rounded-4 border-1 mb-2" id="chat-${dxcallsign}-list" data-bs-toggle="list" href="#chat-${dxcallsign}" role="tab" aria-controls="chat-${dxcallsign}">
<div class="d-flex w-100 justify-content-between">
@ -321,80 +338,196 @@ console.log(obj);
`;
document.getElementById('list-tab').insertAdjacentHTML("beforeend", new_callsign);
var message_area = `
<div class="tab-pane fade" id="chat-${dxcallsign}" role="tabpanel" aria-labelledby="chat-${dxcallsign}-list"></div>
`;
document.getElementById('nav-tabContent').insertAdjacentHTML("beforeend", message_area);
document.getElementById('nav-tabContent').insertAdjacentHTML("beforeend", message_area);
// create eventlistener for listening on clicking on a callsign
document.getElementById('chat-' + dxcallsign + '-list').addEventListener('click', function() {
document.getElementById('chatModuleDxCall').value = dxcallsign;
// scroll to bottom
var element = document.getElementById("message-container");
element.scrollTo(0,element.scrollHeight);
});
element.scrollTo(0, element.scrollHeight);
});
}
// APPEND MESSAGES TO CALLSIGN
if (obj.status == 'transmit'){
var message_class = 'card text-right border-primary bg-primary';
}else if (obj.status == 'transmitting'){
var message_class = 'card text-right border-warning bg-warning';
}else if (obj.status == 'failed'){
var message_class = 'card text-right border-danger bg-danger';
}else if (obj.status == 'success'){
var message_class = 'card text-right border-success bg-success';
} else {
var message_class = 'card text-right border-secondary bg-secondary';
}
if(!(document.getElementById('msg-' + obj._id))){
if (obj.type == 'received'){
if (obj.status == 'transmit') {
var status = '<i class="bi bi-arrow-left"></i>';
} else if (obj.status == 'transmitting') {
var status = '<i class="bi bi-arrow-left-right"></i>';
} else if (obj.status == 'failed') {
var status = '<i class="bi bi-x-square"></i>';
} else if (obj.status == 'success') {
var status = '<i class="bi bi-check-square"></i>';
} else {
var status = '<i class="bi bi-question-square"></i>';
}
if (!(document.getElementById('msg-' + obj._id))) {
if (obj.type == 'ping') {
var new_message = `
<div class="mt-3 p-1 rounded mb-0 w-100 bg-secondary" id="msg-${obj._id}">
<p class="font-monospace text-small text-white mb-0 text-break">PING ACK - snr: ${obj.snr} - ${timestampShort} </p>
</div>
`;
}
if (obj.type == 'beacon') {
var new_message = `
<div class="mt-3 p-1 rounded mb-0 w-100 bg-info" id="msg-${obj._id}">
<p class="font-monospace text-small text-white mb-0 text-break">BEACON - snr: ${obj.snr} - ${timestampShort} </p>
</div>
`;
}
if (obj.type == 'received') {
var new_message = `
<div class="mt-3 mb-0 w-75" id="msg-${obj._id}">
<p class="font-monospace text-small mb-0 text-muted text-break">${timestamp}</p>
<!--<p class="font-monospace text-small mb-0 text-muted text-break">${timestamp}</p>-->
<div class="card border-light bg-light" id="msg-${obj._id}">
<div class="card-body">
<p class="card-text text-break text-wrap">${obj.msg}</p>
${fileheader}
<div class="card-body p-0">
<p class="card-text p-2 mb-0 text-break text-wrap">${obj.msg}</p>
<p class="text-right mb-0 p-1 text-white" style="text-align: left; font-size : 0.9rem">
<span class="badge bg-light text-muted">${timestamp}</span>
</p>
</div>
</div>
</div>
`;
}
if (obj.type == 'transmit'){
if (obj.type == 'transmit') {
var new_message = `
<div class="ml-auto mt-3 mb-0 w-75" style="margin-left: auto;">
<p class="font-monospace text-right mb-0 text-muted" style="text-align: right;">${timestamp}</p>
<div class="${message_class}" id="msg-${obj._id}">
<div class="card-body">
<p class="card-text text-white text-break text-wrap">${obj.msg}</p>
<!--<p class="font-monospace text-right mb-0 text-muted" style="text-align: right;">${timestamp}</p>-->
<div class="card rounded" id="msg-${obj._id}">
${fileheader}
<div class="card-body p-0 text-right bg-primary">
<p class="card-text p-2 mb-0 text-white text-break text-wrap">${obj.msg}</p>
<p class="text-right mb-0 p-1 text-white" style="text-align: right; font-size : 0.9rem">
<span class="badge bg-light text-dark">${timestamp}</span>
<span class="badge bg-light text-dark">${status}</span>
<button type="button" id="retransmit-msg-${obj._id}" class="btn btn-light"><i class="bi bi-arrow-repeat" style="font-size: 0.9rem; color: black;"></i></button>
</p>
</div>
</div>
</div>
`;
}
var id = "chat-" + obj.dxcallsign
document.getElementById(id).insertAdjacentHTML("beforeend", new_message);
var element = document.getElementById("message-container");
element.scrollTo(0,element.scrollHeight);
} else if(document.getElementById('msg-' + obj._id)) {
id = "msg-" + obj._id;
document.getElementById(id).className = message_class;
}
`;
}
// CHECK CHECK CHECK --> This could be done better
var id = "chat-" + obj.dxcallsign
document.getElementById(id).insertAdjacentHTML("beforeend", new_message);
var element = document.getElementById("message-container");
element.scrollTo(0, element.scrollHeight);
} else if (document.getElementById('msg-' + obj._id)) {
id = "msg-" + obj._id;
//document.getElementById(id).className = message_class;
}
// CREATE SAVE TO FOLDER EVENT LISTENER
if ((document.getElementById('save-file-msg-' + obj._id))) {
document.getElementById('save-file-msg-' + obj._id).addEventListener("click", () => {
saveFileToFilder(obj._id)
});
}
// CREATE RESEND MSG EVENT LISTENER
if ((document.getElementById('retransmit-msg-' + obj._id))) {
document.getElementById('retransmit-msg-' + obj._id).addEventListener("click", () => {
db.get(obj._id, {
attachments: true
}).then(function(doc) {
// handle doc
console.log(doc)
var filename = Object.keys(doc._attachments)[0]
var filetype = doc._attachments[filename].content_type
var file = doc._attachments[filename].data
var data_with_attachment = doc.msg + split_char + filename + split_char + filetype + split_char + file;
let Data = {
command: doc.command,
dxcallsign: doc.dxcallsign,
mode: doc.mode,
frames: doc.frames,
data: data_with_attachment,
checksum: doc.checksum,
uuid: doc.uuid
};
ipcRenderer.send('run-tnc-command', Data);
}).catch(function(err) {
console.log(err);
});
});
};
}
function getObjByID(id) {
/*
{
"timestamp": 1648139683,
"dxcallsign": "DN2LS-0",
"dxgrid": "NULL",
"msg": "",
"checksum": "NULL",
"type": "transmit",
"status": "transmit",
"uuid": "5b72a46c-49cf-40d6-8936-a64c95bc3da7",
"_attachments": {
"CMakeLists.txt": {
"content_type": "text/plain",
"digest": "md5-Cdk6Ol6uuJ7Gj5lin9o4SQ==",
"length": 7802,
"revpos": 1,
"stub": true
}
},
"_id": "5b72a46c-49cf-40d6-8936-a64c95bc3da7",
"_rev": "1-6df2d7227c4f89f8a3a2b4978661dd79"
}
**/
return db.get(id, {
attachments: true
}).then(function(doc) {
return doc
}).catch(function(err) {
console.log(err);
return false
});
}
function saveFileToFilder(id) {
db.get(id,{attachments: true}).then(function(obj) {
console.log(obj)
console.log(obj._attachments)
var filename = Object.keys(obj._attachments)[0]
var filetype = obj._attachments[filename].content_type
var file = obj._attachments[filename].data
db.getAttachment(id, filename).then(function (data) {
// handle result
console.log(data)
let Data = {
file: data,
filename: filename,
filetype: filetype,
}
console.log(Data)
ipcRenderer.send('save-file-to-folder', Data);
}).catch(function(err) {
console.log(err);
return false
});
}).catch(function (err) {
console.log(err);
});
}

View file

@ -777,7 +777,7 @@ document.getElementById('openReceivedFilesFolder').addEventListener('click', ()
var fileList = document.getElementById("dataModalFile").files;
console.log(fileList)
var reader = new FileReader();
reader.readAsBinaryString(fileList[0]);
//reader.readAsDataURL(fileList[0]);

View file

@ -233,6 +233,16 @@ client.on('data', function(socketdata) {
}
// Check for Ping
if (data['type'] == 'ping') {
ipcRenderer.send('request-new-msg-received', {data: [data]});
}
// Check for Beacon
if (data['type'] == 'beacon') {
ipcRenderer.send('request-new-msg-received', {data: [data]});
}
/* A TEST WITH STREAMING DATA .... */
// if we received data through network stream, we get a single data item
if (data['arq'] == 'received') {
@ -251,6 +261,7 @@ client.on('data', function(socketdata) {
if(splitted_data[0] == 'm'){
messageArray.push(data)
console.log(data)
}
rxBufferLengthGui = dataArray.length
@ -258,13 +269,12 @@ client.on('data', function(socketdata) {
data: dataArray,
};
ipcRenderer.send('request-update-rx-buffer', Files);
ipcRenderer.send('request-new-msg-received', Files);
rxMsgBufferLengthGui = messageArray.length
let Messages = {
data: messageArray,
};
//ipcRenderer.send('request-update-rx-msg-buffer', Messages);
ipcRenderer.send('request-new-msg-received', Messages);
}

View file

@ -68,13 +68,14 @@
<input class="form-control" maxlength="8" style="max-width: 6rem; text-transform:uppercase" id="chatModuleDxCall" placeholder="DX CALL"></input>
<input class="form-control" id="chatModuleMessage" placeholder="Message"></input>
<button class="btn btn-sm btn-primary me-2" id="emojipickerbutton" type="button"><i class="bi bi-emoji-smile"></i></button>
<button class="btn btn-sm btn-primary me-2" style="width: 3rem" id="selectFiles" type="button"><i class="bi bi-paperclip" style="font-size: 1.2rem; color: white;"></i></button>
<button class="btn btn-sm btn-secondary me-2" style="width: 5rem" id="sendMessage" type="button"><i class="bi bi-send" style="font-size: 1.2rem; color: white;"></i></button>
</div>
@ -87,7 +88,6 @@
</div>

View file

@ -3,6 +3,9 @@ import json
import sys
import multiprocessing
import sounddevice as sd
'''
####################################################
# https://stackoverflow.com/questions/7088672/pyaudio-working-but-spits-out-error-messages-each-time
# https://github.com/DJ2LS/FreeDATA/issues/22
@ -42,6 +45,7 @@ def noalsaerr():
# with noalsaerr():
# p = pyaudio.PyAudio()
'''
#####################################################
def get_audio_devices():
@ -75,6 +79,7 @@ def fetch_audio_devices(input_devices, output_devices):
Returns:
"""
'''
# UPDATE LIST OF AUDIO DEVICES
try:
# we need to "try" this, because sometimes libasound.so isn't in the default place
@ -87,21 +92,29 @@ def fetch_audio_devices(input_devices, output_devices):
#input_devices = []
#output_devices = []
for i in range(0, p.get_device_count()):
'''
devices = sd.query_devices(device=None, kind=None)
index = 0
for device in devices:
#for i in range(0, p.get_device_count()):
# we need to do a try/except, because on Windows there is no audio device range
try:
maxInputChannels = p.get_device_info_by_host_api_device_index(0, i).get('maxInputChannels')
maxOutputChannels = p.get_device_info_by_host_api_device_index(0, i).get('maxOutputChannels')
name = p.get_device_info_by_host_api_device_index(0, i).get('name')
#maxInputChannels = p.get_device_info_by_host_api_device_index(0, i).get('maxInputChannels')
#maxOutputChannels = p.get_device_info_by_host_api_device_index(0, i).get('maxOutputChannels')
#name = p.get_device_info_by_host_api_device_index(0, i).get('name')
name = device["name"]
maxOutputChannels = device["max_output_channels"]
maxInputChannels = device["max_input_channels"]
except:
maxInputChannels = 0
maxOutputChannels = 0
name = ''
if maxInputChannels > 0:
input_devices.append({"id": i, "name": str(name)})
input_devices.append({"id": index, "name": str(name)})
if maxOutputChannels > 0:
output_devices.append({"id": i, "name": str(name)})
p.terminate()
output_devices.append({"id": index, "name": str(name)})
index += 1
#p.terminate()
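# note on the sounddevice API used above: sd.query_devices() returns one entry per device with
# keys like 'name', 'max_input_channels' and 'max_output_channels', and the position in that
# list is the device id that can later be passed to sd.RawStream(device=...) in modem.py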

View file

@ -17,6 +17,8 @@ class FREEDV_MODE(Enum):
"""
enum for codec2 modes and names
"""
fsk_ldpc_0 = 200
fsk_ldpc_1 = 201
fsk_ldpc = 9
datac0 = 14
datac1 = 10
@ -170,7 +172,7 @@ adv.codename = 'H_128_256_5'.encode('utf-8') # code word
HRA_112_112 rate 0.50 (224,112) BPF: 14 not working
HRA_56_56 rate 0.50 (112,56) BPF: 7 not working
H_2064_516_sparse rate 0.80 (2580,2064) BPF: 258 not working
H_2064_516_sparse rate 0.80 (2580,2064) BPF: 258 working
HRAb_396_504 rate 0.79 (504,396) BPF: 49 not working
H_256_768_22 rate 0.33 (768,256) BPF: 32 working
H_256_512_4 rate 0.50 (512,256) BPF: 32 working
@ -178,9 +180,9 @@ HRAa_1536_512 rate 0.75 (2048,1536) BPF: 192 not working
H_128_256_5 rate 0.50 (256,128) BPF: 16 working
H_4096_8192_3d rate 0.50 (8192,4096) BPF: 512 not working
H_16200_9720 rate 0.60 (16200,9720) BPF: 1215 not working
H_1024_2048_4f rate 0.50 (2048,1024) BPF: 128 not working
H_1024_2048_4f rate 0.50 (2048,1024) BPF: 128 working
'''
# --------------- 2 FSK HRA_56_56, 7 bytes
# --------------- 2 FSK H_128_256_5, 16 bytes
api.FREEDV_MODE_FSK_LDPC_0_ADV = ADVANCED()
api.FREEDV_MODE_FSK_LDPC_0_ADV.interleave_frames = 0
api.FREEDV_MODE_FSK_LDPC_0_ADV.M = 2
@ -188,11 +190,51 @@ api.FREEDV_MODE_FSK_LDPC_0_ADV.Rs = 100
api.FREEDV_MODE_FSK_LDPC_0_ADV.Fs = 8000
api.FREEDV_MODE_FSK_LDPC_0_ADV.first_tone = 1500
api.FREEDV_MODE_FSK_LDPC_0_ADV.tone_spacing = 200
api.FREEDV_MODE_FSK_LDPC_0_ADV.codename = 'HRA_56_56'.encode('utf-8') # code word
api.FREEDV_MODE_FSK_LDPC_0_ADV.codename = 'H_128_256_5'.encode('utf-8') # code word
# --------------- 4 FSK H_256_512_4, 32 bytes
api.FREEDV_MODE_FSK_LDPC_1_ADV = ADVANCED()
api.FREEDV_MODE_FSK_LDPC_1_ADV.interleave_frames = 0
api.FREEDV_MODE_FSK_LDPC_1_ADV.M = 4
api.FREEDV_MODE_FSK_LDPC_1_ADV.Rs = 100
api.FREEDV_MODE_FSK_LDPC_1_ADV.Fs = 8000
api.FREEDV_MODE_FSK_LDPC_1_ADV.first_tone = 1250
api.FREEDV_MODE_FSK_LDPC_1_ADV.tone_spacing = 200
api.FREEDV_MODE_FSK_LDPC_1_ADV.codename = 'H_256_512_4'.encode('utf-8') # code word
# ------- MODEM STATS STRUCTURES
MODEM_STATS_NC_MAX = 50+1
MODEM_STATS_NR_MAX = 160
MODEM_STATS_ET_MAX = 8
MODEM_STATS_EYE_IND_MAX = 160
MODEM_STATS_NSPEC = 512
MODEM_STATS_MAX_F_HZ = 4000
MODEM_STATS_MAX_F_EST = 4
# modem stats structure
class MODEMSTATS(ctypes.Structure):
""" """
_fields_ = [
("Nc", ctypes.c_int),
("snr_est", ctypes.c_float),
("rx_symbols", (ctypes.c_float * MODEM_STATS_NR_MAX)*MODEM_STATS_NC_MAX),
("nr", ctypes.c_int),
("sync", ctypes.c_int),
("foff", ctypes.c_float),
("rx_timing", ctypes.c_float),
("clock_offset", ctypes.c_float),
("sync_metric", ctypes.c_float),
("pre", ctypes.c_int),
("post", ctypes.c_int),
("uw_fails", ctypes.c_int),
("neyetr", ctypes.c_int), # How many eye traces are plotted
("neyesamp", ctypes.c_int), # How many samples in the eye diagram
("f_est", (ctypes.c_float * MODEM_STATS_MAX_F_EST)), # How many samples in the eye diagram
("fft_buf", (ctypes.c_float * MODEM_STATS_NSPEC * 2)),
]
# Return code flags for freedv_get_rx_status() function
api.FREEDV_RX_TRIAL_SYNC = 0x1 # demodulator has trial sync
api.FREEDV_RX_SYNC = 0x2 # demodulator has sync

View file

@ -32,6 +32,8 @@ class DATA():
""" """
def __init__(self):
self.mycallsign = static.MYCALLSIGN # inital set of mycallsign. Will be overwritten later
self.data_queue_transmit = DATA_QUEUE_TRANSMIT
self.data_queue_received = DATA_QUEUE_RECEIVED
@ -69,8 +71,8 @@ class DATA():
self.mode_list_low_bw = [14,12]
self.time_list_low_bw = [3,7]
self.mode_list_high_bw = [14,12,10] # mode list of available modes, each mode will be used 2 times per speed level
self.time_list_high_bw = [3, 7, 8] # list for time to wait for corresponding mode in seconds
self.mode_list_high_bw = [14,12,10] # 201 = FSK; mode list of available modes, each mode will be used 2 times per speed level
self.time_list_high_bw = [3, 7, 8, 30] # list for time to wait for corresponding mode in seconds
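# note: the numeric mode ids refer to codec2.FREEDV_MODE (14 = datac0, 12 = datac3, 10 = datac1);
# each entry in the matching time_list is the wait time in seconds for that mode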
# mode list for selecting between low bandwidth ( 500Hz ) and normal modes with higher bandwidth
if static.LOW_BANDWITH_MODE:
@ -197,8 +199,8 @@ class DATA():
#if bytes(bytes_out[1:3]) == static.MYCALLSIGN_CRC or bytes(bytes_out[2:4]) == static.MYCALLSIGN_CRC or frametype == 200 or frametype == 250:
if helpers.check_callsign(static.MYCALLSIGN, bytes(bytes_out[1:3])) or helpers.check_callsign(static.MYCALLSIGN, bytes(bytes_out[2:4])) or frametype == 200 or frametype == 250:
#if bytes(bytes_out[1:3]) == self.mycallsign_CRC or bytes(bytes_out[2:4]) == self.mycallsign_CRC or frametype == 200 or frametype == 250:
if helpers.check_callsign(self.mycallsign, bytes(bytes_out[1:3])) or helpers.check_callsign(self.mycallsign, bytes(bytes_out[2:4])) or frametype == 200 or frametype == 250:
# CHECK IF FRAMETYPE IS BETWEEN 10 and 50 ------------------------
frame = frametype - 10
@ -353,7 +355,7 @@ class DATA():
self.received_mycall_crc = data_in[2:4]
# check if callsign ssid override
mycallsign = helpers.check_callsign(static.MYCALLSIGN, self.received_mycall_crc)[1]
mycallsign = helpers.check_callsign(self.mycallsign, self.received_mycall_crc)[1]
print(mycallsign)
@ -387,6 +389,9 @@ class DATA():
static.RX_BURST_BUFFER[RX_N_FRAME_OF_BURST] = data_in[6:] # [frame_type][n_frames_per_burst][CRC16][CRC16]
structlog.get_logger("structlog").debug("[TNC] static.RX_BURST_BUFFER", buffer=static.RX_BURST_BUFFER)
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
'''
check if we received all frames per burst by checking if burst buffer has no more "Nones"
this is the ideal case because we received all data
@ -555,7 +560,7 @@ class DATA():
# check if callsign ssid override
mycallsign = helpers.check_callsign(static.MYCALLSIGN, self.received_mycall_crc)[1]
mycallsign = helpers.check_callsign(self.mycallsign, self.received_mycall_crc)[1]
base64_data = base64.b64encode(data_frame)
base64_data = base64_data.decode("utf-8")
@ -584,7 +589,7 @@ class DATA():
# update our statistics AFTER the frame ACK
self.calculate_transfer_rate_rx(self.rx_start_of_transmission, len(static.RX_FRAME_BUFFER))
structlog.get_logger("structlog").info("[TNC] | RX | DATACHANNEL [" + str(static.MYCALLSIGN, 'utf-8') + "]<< >>[" + str(static.DXCALLSIGN, 'utf-8') + "]", snr=static.SNR)
structlog.get_logger("structlog").info("[TNC] | RX | DATACHANNEL [" + str(self.mycallsign, 'utf-8') + "]<< >>[" + str(static.DXCALLSIGN, 'utf-8') + "]", snr=static.SNR)
else:
static.INFO.append("ARQ;RECEIVING;FAILED")
@ -717,13 +722,13 @@ class DATA():
# if speed level is greater than our available modes, set speed level to maximum = length of mode list - 1
print(self.mode_list)
if self.speed_level >= len(self.mode_list):
self.speed_level = len(self.mode_list) - 1
static.ARQ_SPEED_LEVEL = self.speed_level
data_mode = self.mode_list[self.speed_level]
structlog.get_logger("structlog").debug("Speed-level", level=self.speed_level, retry=self.tx_n_retry_of_burst)
structlog.get_logger("structlog").debug("Speed-level:", level=self.speed_level, retry=self.tx_n_retry_of_burst, mode=data_mode)
@ -872,6 +877,7 @@ class DATA():
# only process data if we are in ARQ and BUSY state
if static.ARQ_STATE:
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
self.burst_ack = True # Force data loops of TNC to stop and continue with next frame
self.data_channel_last_received = int(time.time()) # we need to update our timeout timestamp
self.burst_ack_snr= int.from_bytes(bytes(data_in[5:6]), "big")
@ -897,6 +903,7 @@ class DATA():
# only process data if we are in ARQ and BUSY state
if static.ARQ_STATE:
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
self.burst_nack = True # Force data loops of TNC to stop and continue with next frame
self.data_channel_last_received = int(time.time()) # we need to update our timeout timestamp
self.burst_ack_snr= int.from_bytes(bytes(data_in[5:6]), "big")
@ -910,6 +917,7 @@ class DATA():
""" """
# only process data if we are in ARQ and BUSY state
if static.ARQ_STATE:
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
self.data_frame_ack_received = True # Force data loops of TNC to stop and continue with next frame
self.data_channel_last_received = int(time.time()) # we need to update our timeout timestamp
self.arq_session_last_received = int(time.time()) # we need to update our timeout timestamp
@ -923,6 +931,7 @@ class DATA():
Returns:
"""
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
static.INFO.append("ARQ;TRANSMITTING;FAILED")
jsondata = {"arq":"transmission", "status" : "failed", "uuid" : self.transmission_uuid}
json_data_out = json.dumps(jsondata)
@ -946,6 +955,7 @@ class DATA():
# only process data if we are in ARQ and BUSY state
if static.ARQ_STATE and static.TNC_STATE == 'BUSY':
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
self.rpt_request_received = True
self.data_channel_last_received = int(time.time()) # we need to update our timeout timestamp
@ -975,7 +985,7 @@ class DATA():
"""
# we still need to check this. It should maybe go into INIT!!!
self.datachannel_timeout = False
structlog.get_logger("structlog").info("SESSION [" + str(static.MYCALLSIGN, 'utf-8') + "]>> <<[" + str(static.DXCALLSIGN, 'utf-8') + "]", state=static.ARQ_SESSION_STATE)
structlog.get_logger("structlog").info("SESSION [" + str(self.mycallsign, 'utf-8') + "]>> <<[" + str(static.DXCALLSIGN, 'utf-8') + "]", state=static.ARQ_SESSION_STATE)
self.open_session(callsign)
@ -1010,7 +1020,7 @@ class DATA():
connection_frame[:1] = frametype
connection_frame[1:3] = static.DXCALLSIGN_CRC
connection_frame[3:5] = static.MYCALLSIGN_CRC
connection_frame[5:13] = helpers.callsign_to_bytes(static.MYCALLSIGN)
connection_frame[5:13] = helpers.callsign_to_bytes(self.mycallsign)
while not static.ARQ_SESSION:
@ -1019,7 +1029,7 @@ class DATA():
txbuffer = [connection_frame]
static.TRANSMITTING = True
structlog.get_logger("structlog").info("SESSION [" + str(static.MYCALLSIGN, 'utf-8') + "]>>?<<[" + str(static.DXCALLSIGN, 'utf-8') + "]", a=attempt, state=static.ARQ_SESSION_STATE)
structlog.get_logger("structlog").info("SESSION [" + str(self.mycallsign, 'utf-8') + "]>>?<<[" + str(static.DXCALLSIGN, 'utf-8') + "]", a=attempt, state=static.ARQ_SESSION_STATE)
modem.MODEM_TRANSMIT_QUEUE.put([14,1,0,txbuffer])
# wait while transmitting
@ -1060,7 +1070,8 @@ class DATA():
static.DXCALLSIGN_CRC = bytes(data_in[3:5])
static.DXCALLSIGN = helpers.bytes_to_callsign(bytes(data_in[5:13]))
structlog.get_logger("structlog").info("SESSION [" + str(static.MYCALLSIGN, 'utf-8') + "]>>|<<[" + str(static.DXCALLSIGN, 'utf-8') + "]", state=static.ARQ_SESSION_STATE)
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
structlog.get_logger("structlog").info("SESSION [" + str(self.mycallsign, 'utf-8') + "]>>|<<[" + str(static.DXCALLSIGN, 'utf-8') + "]", state=static.ARQ_SESSION_STATE)
static.ARQ_SESSION = True
static.TNC_STATE = 'BUSY'
@ -1070,7 +1081,8 @@ class DATA():
def close_session(self):
""" """
static.ARQ_SESSION_STATE = 'disconnecting'
structlog.get_logger("structlog").info("SESSION [" + str(static.MYCALLSIGN, 'utf-8') + "]<<X>>[" + str(static.DXCALLSIGN, 'utf-8') + "]", state=static.ARQ_SESSION_STATE)
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
structlog.get_logger("structlog").info("SESSION [" + str(self.mycallsign, 'utf-8') + "]<<X>>[" + str(static.DXCALLSIGN, 'utf-8') + "]", state=static.ARQ_SESSION_STATE)
static.INFO.append("ARQ;SESSION;CLOSE")
self.IS_ARQ_SESSION_MASTER = False
static.ARQ_SESSION = False
@ -1082,7 +1094,7 @@ class DATA():
disconnection_frame[:1] = frametype
disconnection_frame[1:3] = static.DXCALLSIGN_CRC
disconnection_frame[3:5] = static.MYCALLSIGN_CRC
disconnection_frame[5:13] = helpers.callsign_to_bytes(static.MYCALLSIGN)
disconnection_frame[5:13] = helpers.callsign_to_bytes(self.mycallsign)
txbuffer = [disconnection_frame]
static.TRANSMITTING = True
@ -1098,7 +1110,8 @@ class DATA():
def received_session_close(self):
""" """
static.ARQ_SESSION_STATE = 'disconnected'
structlog.get_logger("structlog").info("SESSION [" + str(static.MYCALLSIGN, 'utf-8') + "]<<X>>[" + str(static.DXCALLSIGN, 'utf-8') + "]", state=static.ARQ_SESSION_STATE)
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
structlog.get_logger("structlog").info("SESSION [" + str(self.mycallsign, 'utf-8') + "]<<X>>[" + str(static.DXCALLSIGN, 'utf-8') + "]", state=static.ARQ_SESSION_STATE)
static.INFO.append("ARQ;SESSION;CLOSE")
self.IS_ARQ_SESSION_MASTER = False
@ -1168,6 +1181,9 @@ class DATA():
Returns:
"""
# overwrite mycallsign in case of different SSID
self.mycallsign = mycallsign
static.TNC_STATE = 'BUSY'
self.arq_file_transfer = True
@ -1308,7 +1324,7 @@ class DATA():
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
# check if callsign ssid override
mycallsign = helpers.check_callsign(static.MYCALLSIGN, data_in[1:3])[1]
mycallsign = helpers.check_callsign(self.mycallsign, data_in[1:3])[1]
structlog.get_logger("structlog").info("[TNC] ARQ | DATA | RX | [" + str(mycallsign, 'utf-8') + "]>> <<[" + str(static.DXCALLSIGN, 'utf-8') + "]", bandwith="wide")
@ -1377,7 +1393,7 @@ class DATA():
helpers.add_to_heard_stations(static.DXCALLSIGN,static.DXGRID, 'DATA-CHANNEL', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
structlog.get_logger("structlog").info("[TNC] ARQ | DATA | TX | [" + str(static.MYCALLSIGN, 'utf-8') + "]>>|<<[" + str(static.DXCALLSIGN, 'utf-8') + "]", snr=static.SNR)
structlog.get_logger("structlog").info("[TNC] ARQ | DATA | TX | [" + str(self.mycallsign, 'utf-8') + "]>>|<<[" + str(static.DXCALLSIGN, 'utf-8') + "]", snr=static.SNR)
# as soon as we set ARQ_STATE to DATA, transmission starts
static.ARQ_STATE = True
@ -1404,17 +1420,17 @@ class DATA():
static.DXCALLSIGN_CRC = helpers.get_crc_16(static.DXCALLSIGN)
static.INFO.append("PING;SENDING")
structlog.get_logger("structlog").info("[TNC] PING REQ [" + str(static.MYCALLSIGN, 'utf-8') + "] >>> [" + str(static.DXCALLSIGN, 'utf-8') + "]" )
structlog.get_logger("structlog").info("[TNC] PING REQ [" + str(self.mycallsign, 'utf-8') + "] >>> [" + str(static.DXCALLSIGN, 'utf-8') + "]" )
ping_frame = bytearray(14)
ping_frame[:1] = bytes([210])
ping_frame[1:3] = static.DXCALLSIGN_CRC
ping_frame[3:5] = static.MYCALLSIGN_CRC
ping_frame[5:13] = helpers.callsign_to_bytes(static.MYCALLSIGN)
ping_frame[5:13] = helpers.callsign_to_bytes(self.mycallsign)
txbuffer = [ping_frame]
static.TRANSMITTING = True
modem.MODEM_TRANSMIT_QUEUE.put([14,1,0,txbuffer])
modem.MODEM_TRANSMIT_QUEUE.put(['FSK_LDPC_0',1,0,txbuffer])
# wait while transmitting
while static.TRANSMITTING:
time.sleep(0.01)
@ -1438,7 +1454,7 @@ class DATA():
static.INFO.append("PING;RECEIVING")
# check if callsign ssid override
mycallsign = helpers.check_callsign(static.MYCALLSIGN, data_in[1:3])[1]
mycallsign = helpers.check_callsign(self.mycallsign, data_in[1:3])[1]
structlog.get_logger("structlog").info("[TNC] PING REQ [" + str(mycallsign, 'utf-8') + "] <<< [" + str(static.DXCALLSIGN, 'utf-8') + "]", snr=static.SNR )
@ -1451,7 +1467,7 @@ class DATA():
txbuffer = [ping_frame]
static.TRANSMITTING = True
modem.MODEM_TRANSMIT_QUEUE.put([14,1,0,txbuffer])
modem.MODEM_TRANSMIT_QUEUE.put(['FSK_LDPC_0',1,0,txbuffer])
# wait while transmitting
while static.TRANSMITTING:
time.sleep(0.01)
@ -1468,12 +1484,16 @@ class DATA():
static.DXCALLSIGN_CRC = bytes(data_in[3:5])
static.DXGRID = bytes(data_in[5:11]).rstrip(b'\x00')
jsondata = {"type" : "ping", "status" : "ack", "uuid" : str(uuid.uuid4()), "timestamp": int(time.time()), "mycallsign" : str(self.mycallsign, 'utf-8'), "dxcallsign": str(static.DXCALLSIGN, 'utf-8'), "dxgrid": str(static.DXGRID, 'utf-8'), "snr": str(static.SNR)}
json_data_out = json.dumps(jsondata)
sock.SOCKET_QUEUE.put(json_data_out)
helpers.add_to_heard_stations(static.DXCALLSIGN, static.DXGRID, 'PING-ACK', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
static.INFO.append("PING;RECEIVEDACK")
structlog.get_logger("structlog").info("[TNC] PING ACK [" + str(static.MYCALLSIGN, 'utf-8') + "] >|< [" + str(static.DXCALLSIGN, 'utf-8') + "]", snr=static.SNR )
structlog.get_logger("structlog").info("[TNC] PING ACK [" + str(self.mycallsign, 'utf-8') + "] >|< [" + str(static.DXCALLSIGN, 'utf-8') + "]", snr=static.SNR )
static.TNC_STATE = 'IDLE'
@ -1531,7 +1551,7 @@ class DATA():
beacon_frame = bytearray(14)
beacon_frame[:1] = bytes([250])
beacon_frame[1:9] = helpers.callsign_to_bytes(static.MYCALLSIGN)
beacon_frame[1:9] = helpers.callsign_to_bytes(self.mycallsign)
beacon_frame[9:13] = static.MYGRID[:4]
txbuffer = [beacon_frame]
@ -1560,6 +1580,11 @@ class DATA():
# here we add the received station to the heard stations buffer
dxcallsign = helpers.bytes_to_callsign(bytes(data_in[1:9]))
dxgrid = bytes(data_in[9:13]).rstrip(b'\x00')
jsondata = {"type" : "beacon", "status" : "received", "uuid" : str(uuid.uuid4()), "timestamp": int(time.time()), "mycallsign" : str(self.mycallsign, 'utf-8'), "dxcallsign": str(dxcallsign, 'utf-8'), "dxgrid": str(dxgrid, 'utf-8'), "snr": str(static.SNR)}
json_data_out = json.dumps(jsondata)
sock.SOCKET_QUEUE.put(json_data_out)
static.INFO.append("BEACON;RECEIVING")
structlog.get_logger("structlog").info("[TNC] BEACON RCVD [" + str(dxcallsign, 'utf-8') + "]["+ str(dxgrid, 'utf-8') +"] ", snr=static.SNR)
helpers.add_to_heard_stations(dxcallsign,dxgrid, 'BEACON', static.SNR, static.FREQ_OFFSET, static.HAMLIB_FREQUENCY)
@ -1575,7 +1600,7 @@ class DATA():
cq_frame = bytearray(14)
cq_frame[:1] = bytes([200])
cq_frame[1:9] = helpers.callsign_to_bytes(static.MYCALLSIGN)
cq_frame[1:9] = helpers.callsign_to_bytes(self.mycallsign)
cq_frame[9:13] = static.MYGRID[:4]
txbuffer = [cq_frame]
@ -1617,7 +1642,6 @@ class DATA():
"""
try:
print(static.TOTAL_BYTES)
if static.TOTAL_BYTES == 0:
static.TOTAL_BYTES = 1
static.ARQ_TRANSMISSION_PERCENT = int((receivedbytes*static.ARQ_COMPRESSION_FACTOR / (static.TOTAL_BYTES)) * 100)
@ -1712,10 +1736,11 @@ class DATA():
# reset modem receiving state to reduce cpu load
modem.RECEIVE_DATAC1 = False
modem.RECEIVE_DATAC3 = False
modem.RECEIVE_FSK_LDPC = False
#modem.RECEIVE_FSK_LDPC_0 = False
modem.RECEIVE_FSK_LDPC_1 = False
# reset buffer overflow counter
static.BUFFER_OVERFLOW_COUNTER = [0,0,0]
static.BUFFER_OVERFLOW_COUNTER = [0,0,0,0,0]
self.is_IRS = False
self.burst_nack = False
@ -1773,17 +1798,16 @@ class DATA():
if mode_name == 'datac1':
modem.RECEIVE_DATAC1 = True
structlog.get_logger("structlog").debug("changing listening data mode", mode="datac1")
elif mode_name == 'datac3':
modem.RECEIVE_DATAC3 = True
structlog.get_logger("structlog").debug("changing listening data mode", mode="datac3")
elif mode_name == 'fsk_ldpc':
modem.RECEIVE_FSK_LDPC = True
structlog.get_logger("structlog").debug("changing listening data mode", mode="fsk_ldpc")
elif mode_name == 'fsk_ldpc_1':
modem.RECEIVE_FSK_LDPC_1 = True
structlog.get_logger("structlog").debug("changing listening data mode", mode="fsk_ldpc_1")
elif mode_name == 'allmodes':
modem.RECEIVE_DATAC1 = True
modem.RECEIVE_DATAC3 = True
modem.RECEIVE_FSK_LDPC = True
modem.RECEIVE_FSK_LDPC_1 = True
structlog.get_logger("structlog").debug("changing listening data mode", mode="datac1/datac3/fsk_ldpc")
@ -1872,7 +1896,7 @@ class DATA():
#pass
else:
self.data_channel_last_received = 0
structlog.get_logger("structlog").info("DATA [" + str(static.MYCALLSIGN, 'utf-8') + "]<<T>>[" + str(static.DXCALLSIGN, 'utf-8') + "]")
structlog.get_logger("structlog").info("DATA [" + str(self.mycallsign, 'utf-8') + "]<<T>>[" + str(static.DXCALLSIGN, 'utf-8') + "]")
static.INFO.append("ARQ;RECEIVING;FAILED")
if not TESTMODE:
self.arq_cleanup()
@ -1886,7 +1910,7 @@ class DATA():
if self.arq_session_last_received + self.arq_session_timeout > time.time():
time.sleep(0.01)
else:
structlog.get_logger("structlog").info("SESSION [" + str(static.MYCALLSIGN, 'utf-8') + "]<<T>>[" + str(static.DXCALLSIGN, 'utf-8') + "]")
structlog.get_logger("structlog").info("SESSION [" + str(self.mycallsign, 'utf-8') + "]<<T>>[" + str(static.DXCALLSIGN, 'utf-8') + "]")
static.INFO.append("ARQ;SESSION;TIMEOUT")
self.close_session()

View file

@ -25,31 +25,10 @@ import queue
import codec2
import audio
import sounddevice as sd
from collections import deque
MODEM_STATS_NR_MAX = 320
MODEM_STATS_NC_MAX = 51
# modem stats structure
class MODEMSTATS(ctypes.Structure):
""" """
_fields_ = [
("Nc", ctypes.c_int),
("snr_est", ctypes.c_float),
("rx_symbols", (ctypes.c_float * MODEM_STATS_NR_MAX)*MODEM_STATS_NC_MAX),
("nr", ctypes.c_int),
("sync", ctypes.c_int),
("foff", ctypes.c_float),
("rx_timing", ctypes.c_float),
("clock_offset", ctypes.c_float),
("sync_metric", ctypes.c_float),
("pre", ctypes.c_int),
("post", ctypes.c_int),
("uw_fails", ctypes.c_int),
]
# init FIFO queue to store received frames in
MODEM_RECEIVED_QUEUE = queue.Queue()
@ -59,7 +38,7 @@ static.TRANSMITTING = False
# receive only specific modes to reduce cpu load
RECEIVE_DATAC1 = False
RECEIVE_DATAC3 = False
RECEIVE_FSK_LDPC_0 = False
RECEIVE_FSK_LDPC_1 = False
class RF():
""" """
@ -128,21 +107,27 @@ class RF():
self.datac3_buffer = codec2.audio_buffer(2*self.AUDIO_FRAMES_PER_BUFFER_RX)
self.fsk_ldpc_freedv = cast(codec2.api.freedv_open_advanced(9, ctypes.byref(codec2.api.FREEDV_MODE_FSK_LDPC_0_ADV)), c_void_p)
#self.fsk_ldpc_freedv = cast(codec2.api.freedv_open(codec2.api.FREEDV_MODE_FSK_LDPC), c_void_p)
self.fsk_ldpc_bytes_per_frame = int(codec2.api.freedv_get_bits_per_modem_frame(self.fsk_ldpc_freedv)/8)
self.fsk_ldpc_bytes_out = create_string_buffer(self.fsk_ldpc_bytes_per_frame)
#codec2.api.freedv_set_frames_per_burst(self.fsk_ldpc_freedv,1)
self.fsk_ldpc_buffer = codec2.audio_buffer(self.AUDIO_FRAMES_PER_BUFFER_RX)
self.fsk_ldpc_freedv_0 = cast(codec2.api.freedv_open_advanced(codec2.api.FREEDV_MODE_FSK_LDPC, ctypes.byref(codec2.api.FREEDV_MODE_FSK_LDPC_0_ADV)), c_void_p)
self.fsk_ldpc_bytes_per_frame_0 = int(codec2.api.freedv_get_bits_per_modem_frame(self.fsk_ldpc_freedv_0)/8)
self.fsk_ldpc_bytes_out_0 = create_string_buffer(self.fsk_ldpc_bytes_per_frame_0)
#codec2.api.freedv_set_frames_per_burst(self.fsk_ldpc_freedv_0,1)
self.fsk_ldpc_buffer_0 = codec2.audio_buffer(self.AUDIO_FRAMES_PER_BUFFER_RX)
self.fsk_ldpc_freedv_1 = cast(codec2.api.freedv_open_advanced(codec2.api.FREEDV_MODE_FSK_LDPC, ctypes.byref(codec2.api.FREEDV_MODE_FSK_LDPC_1_ADV)), c_void_p)
self.fsk_ldpc_bytes_per_frame_1 = int(codec2.api.freedv_get_bits_per_modem_frame(self.fsk_ldpc_freedv_1)/8)
self.fsk_ldpc_bytes_out_1 = create_string_buffer(self.fsk_ldpc_bytes_per_frame_1)
#codec2.api.freedv_set_frames_per_burst(self.fsk_ldpc_freedv_0,1)
self.fsk_ldpc_buffer_1 = codec2.audio_buffer(self.AUDIO_FRAMES_PER_BUFFER_RX)
# initial nin values
self.datac0_nin = codec2.api.freedv_nin(self.datac0_freedv)
self.datac1_nin = codec2.api.freedv_nin(self.datac1_freedv)
self.datac3_nin = codec2.api.freedv_nin(self.datac3_freedv)
self.fsk_ldpc_nin = codec2.api.freedv_nin(self.fsk_ldpc_freedv)
self.fsk_ldpc_nin_0 = codec2.api.freedv_nin(self.fsk_ldpc_freedv_0)
self.fsk_ldpc_nin_1 = codec2.api.freedv_nin(self.fsk_ldpc_freedv_1)
# --------------------------------------------CREATE PYAUDIO INSTANCE
'''
try:
# we need to "try" this, because sometimes libasound.so isn't in the default place
# try to supress error messages
@ -164,8 +149,10 @@ class RF():
static.AUDIO_INPUT_DEVICE = loopback_list[0] #0 = RX
static.AUDIO_OUTPUT_DEVICE = loopback_list[1] #1 = TX
print(f"loopback_list rx: {loopback_list}", file=sys.stderr)
'''
try:
'''
self.audio_stream = self.p.open(format=audio.pyaudio.paInt16,
channels=self.AUDIO_CHANNELS,
rate=self.AUDIO_SAMPLE_RATE_RX,
@ -176,6 +163,12 @@ class RF():
output_device_index=static.AUDIO_OUTPUT_DEVICE,
stream_callback=self.audio_callback
)
'''
self.stream = sd.RawStream(channels=self.AUDIO_CHANNELS, dtype='int16', callback=self.callback, device=static.AUDIO_OUTPUT_DEVICE, samplerate = self.AUDIO_SAMPLE_RATE_RX, blocksize=4800)
self.stream.start()
atexit.register(self.stream.stop)
structlog.get_logger("structlog").info("opened audio devices")
except Exception as e:
@ -184,7 +177,7 @@ class RF():
try:
structlog.get_logger("structlog").debug("[TNC] starting pyaudio callback")
self.audio_stream.start_stream()
#self.audio_stream.start_stream()
except Exception as e:
structlog.get_logger("structlog").error("[TNC] starting pyaudio callback failed", e=e)
@ -222,7 +215,10 @@ class RF():
audio_thread_fsk_ldpc0 = threading.Thread(target=self.audio_fsk_ldpc_0, name="AUDIO_THREAD FSK LDPC0",daemon=True)
audio_thread_fsk_ldpc0.start()
audio_thread_fsk_ldpc1 = threading.Thread(target=self.audio_fsk_ldpc_1, name="AUDIO_THREAD FSK LDPC1",daemon=True)
audio_thread_fsk_ldpc1.start()
hamlib_thread = threading.Thread(target=self.update_rig_data, name="HAMLIB_THREAD",daemon=True)
hamlib_thread.start()
@ -233,7 +229,8 @@ class RF():
worker_transmit.start()
# --------------------------------------------------------------------------------------------------------
def audio_callback(self, data_in48k, frame_count, time_info, status):
#def audio_callback(self, data_in48k, frame_count, time_info, status):
def callback(self, data_in48k, outdata, frames, time, status):
"""
Args:
@ -248,7 +245,7 @@ class RF():
x = np.frombuffer(data_in48k, dtype=np.int16)
x = self.resampler.resample48_to_8(x)
length_x = len(x)
# avoid decoding when transmitting to reduce CPU
@ -274,22 +271,34 @@ class RF():
static.BUFFER_OVERFLOW_COUNTER[2] += 1
# avoid buffer overflow by filling only if buffer not full and selected datachannel mode
if not self.fsk_ldpc_buffer.nbuffer+length_x > self.fsk_ldpc_buffer.size:
if not self.fsk_ldpc_buffer_0.nbuffer+length_x > self.fsk_ldpc_buffer_0.size:
#if RECEIVE_FSK_LDPC_0:
self.fsk_ldpc_buffer.push(x)
self.fsk_ldpc_buffer_0.push(x)
else:
static.BUFFER_OVERFLOW_COUNTER[2] += 1
if not self.modoutqueue or self.mod_out_locked:
data_out48k = np.zeros(frame_count, dtype=np.int16)
static.BUFFER_OVERFLOW_COUNTER[3] += 1
# avoid buffer overflow by filling only if buffer not full and selected datachannel mode
if not self.fsk_ldpc_buffer_1.nbuffer+length_x > self.fsk_ldpc_buffer_1.size:
if RECEIVE_FSK_LDPC_1:
self.fsk_ldpc_buffer_1.push(x)
else:
static.BUFFER_OVERFLOW_COUNTER[4] += 1
if len(self.modoutqueue) <= 0 or self.mod_out_locked:
#if not self.modoutqueue or self.mod_out_locked:
data_out48k = np.zeros(frames, dtype=np.int16)
self.fft_data = bytes(x)
else:
data_out48k = self.modoutqueue.popleft()
self.fft_data = bytes(data_out48k)
return (data_out48k, audio.pyaudio.paContinue)
try:
outdata[:] = data_out48k[:frames]
except Exception as e:
print(e)
#return (data_out48k, audio.pyaudio.paContinue)
# --------------------------------------------------------------------------------------------------------
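# minimal standalone sketch of the duplex callback pattern used above (illustrative only;
# assumes mono int16 audio at 48 kHz and a 4800 sample block size):
#
#   import numpy as np
#   import sounddevice as sd
#
#   def callback(data_in48k, outdata, frames, time, status):
#       x = np.frombuffer(data_in48k, dtype=np.int16)   # raw RX samples from the soundcard
#       # ... resample 48k -> 8k and push x into the per-mode buffers here ...
#       silence = np.zeros(frames, dtype=np.int16)      # nothing queued for TX
#       outdata[:] = silence.tobytes()                  # RawStream hands raw byte buffers back
#
#   stream = sd.RawStream(channels=1, dtype='int16', samplerate=48000,
#                         blocksize=4800, callback=callback)
#   stream.start(); sd.sleep(1000); stream.stop()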
@ -315,12 +324,16 @@ class RF():
# open codec2 instance
self.MODE = mode
if self.MODE == 'FSK_LDPC_0':
print(self.MODE)
if self.MODE == 'FSK_LDPC_0' or self.MODE == 200:
freedv = cast(codec2.api.freedv_open_advanced(codec2.api.FREEDV_MODE_FSK_LDPC, ctypes.byref(codec2.api.FREEDV_MODE_FSK_LDPC_0_ADV)), c_void_p)
elif self.MODE == 'FSK_LDPC_1' or self.MODE == 201:
freedv = cast(codec2.api.freedv_open_advanced(codec2.api.FREEDV_MODE_FSK_LDPC, ctypes.byref(codec2.api.FREEDV_MODE_FSK_LDPC_1_ADV)), c_void_p)
else:
freedv = cast(codec2.api.freedv_open(self.MODE), c_void_p)
# get number of bytes per frame for mode
bytes_per_frame = int(codec2.api.freedv_get_bits_per_modem_frame(freedv)/8)
@ -383,15 +396,19 @@ class RF():
# deactivated for testing purposes
self.mod_out_locked = False
chunk_length = self.AUDIO_FRAMES_PER_BUFFER_TX #4800
chunk = [txbuffer_48k[i:i+chunk_length] for i in range(0, len(txbuffer_48k), chunk_length)]
for c in chunk:
if len(c) < chunk_length:
delta = chunk_length - len(c)
delta_zeros = np.zeros(delta, dtype=np.int16)
c = np.append(c, delta_zeros)
#structlog.get_logger("structlog").debug("[TNC] mod out shorter than audio buffer", delta=delta)
self.modoutqueue.append(c)
# Release our mod_out_lock so we can use the queue
self.mod_out_locked = False
@ -417,7 +434,7 @@ class RF():
def audio_datac0(self):
""" """
nbytes_datac0 = 0
while self.audio_stream.is_active():
while self.stream.active:
threading.Event().wait(0.01)
while self.datac0_buffer.nbuffer >= self.datac0_nin:
# demodulate audio
@ -426,13 +443,13 @@ class RF():
self.datac0_nin = codec2.api.freedv_nin(self.datac0_freedv)
if nbytes_datac0 == self.datac0_bytes_per_frame:
self.modem_received_queue.put([self.datac0_bytes_out, self.datac0_freedv ,self.datac0_bytes_per_frame])
self.get_scatter(self.datac0_freedv)
#self.get_scatter(self.datac0_freedv)
self.calculate_snr(self.datac0_freedv)
def audio_datac1(self):
""" """
nbytes_datac1 = 0
while self.audio_stream.is_active():
while self.stream.active:
threading.Event().wait(0.01)
while self.datac1_buffer.nbuffer >= self.datac1_nin:
# demodulate audio
@ -441,13 +458,13 @@ class RF():
self.datac1_nin = codec2.api.freedv_nin(self.datac1_freedv)
if nbytes_datac1 == self.datac1_bytes_per_frame:
self.modem_received_queue.put([self.datac1_bytes_out, self.datac1_freedv ,self.datac1_bytes_per_frame])
self.get_scatter(self.datac1_freedv)
#self.get_scatter(self.datac1_freedv)
self.calculate_snr(self.datac1_freedv)
def audio_datac3(self):
""" """
nbytes_datac3 = 0
while self.audio_stream.is_active():
while self.stream.active:
threading.Event().wait(0.01)
while self.datac3_buffer.nbuffer >= self.datac3_nin:
# demodulate audio
@ -456,24 +473,41 @@ class RF():
self.datac3_nin = codec2.api.freedv_nin(self.datac3_freedv)
if nbytes_datac3 == self.datac3_bytes_per_frame:
self.modem_received_queue.put([self.datac3_bytes_out, self.datac3_freedv ,self.datac3_bytes_per_frame])
self.get_scatter(self.datac3_freedv)
#self.get_scatter(self.datac3_freedv)
self.calculate_snr(self.datac3_freedv)
def audio_fsk_ldpc_0(self):
""" """
nbytes_fsk_ldpc = 0
while self.audio_stream.is_active():
nbytes_fsk_ldpc_0 = 0
while self.stream.active:
threading.Event().wait(0.01)
while self.fsk_ldpc_buffer.nbuffer >= self.fsk_ldpc_nin:
while self.fsk_ldpc_buffer_0.nbuffer >= self.fsk_ldpc_nin_0:
# demodulate audio
nbytes_fsk_ldpc = codec2.api.freedv_rawdatarx(self.fsk_ldpc_freedv, self.fsk_ldpc_bytes_out, self.fsk_ldpc_buffer.buffer.ctypes)
self.fsk_ldpc_buffer.pop(self.fsk_ldpc_nin)
self.fsk_ldpc_nin = codec2.api.freedv_nin(self.fsk_ldpc_freedv)
if nbytes_fsk_ldpc == self.fsk_ldpc_bytes_per_frame:
self.modem_received_queue.put([self.fsk_ldpc_bytes_out, self.fsk_ldpc_freedv ,self.fsk_ldpc_bytes_per_frame])
self.get_scatter(self.fsk_ldpc_freedv)
self.calculate_snr(self.fsk_ldpc_freedv)
nbytes_fsk_ldpc_0 = codec2.api.freedv_rawdatarx(self.fsk_ldpc_freedv_0, self.fsk_ldpc_bytes_out_0, self.fsk_ldpc_buffer_0.buffer.ctypes)
self.fsk_ldpc_buffer_0.pop(self.fsk_ldpc_nin_0)
self.fsk_ldpc_nin_0 = codec2.api.freedv_nin(self.fsk_ldpc_freedv_0)
if nbytes_fsk_ldpc_0 == self.fsk_ldpc_bytes_per_frame_0:
self.modem_received_queue.put([self.fsk_ldpc_bytes_out_0, self.fsk_ldpc_freedv_0 ,self.fsk_ldpc_bytes_per_frame_0])
#self.get_scatter(self.fsk_ldpc_freedv_0)
self.calculate_snr(self.fsk_ldpc_freedv_0)
def audio_fsk_ldpc_1(self):
""" """
nbytes_fsk_ldpc_1 = 0
while self.stream.active:
threading.Event().wait(0.01)
while self.fsk_ldpc_buffer_1.nbuffer >= self.fsk_ldpc_nin_1:
# demodulate audio
nbytes_fsk_ldpc_1 = codec2.api.freedv_rawdatarx(self.fsk_ldpc_freedv_1, self.fsk_ldpc_bytes_out_1, self.fsk_ldpc_buffer_1.buffer.ctypes)
self.fsk_ldpc_buffer_1.pop(self.fsk_ldpc_nin_1)
self.fsk_ldpc_nin_1 = codec2.api.freedv_nin(self.fsk_ldpc_freedv_1)
if nbytes_fsk_ldpc_1 == self.fsk_ldpc_bytes_per_frame_1:
self.modem_received_queue.put([self.fsk_ldpc_bytes_out_1, self.fsk_ldpc_freedv_1 ,self.fsk_ldpc_bytes_per_frame_1])
#self.get_scatter(self.fsk_ldpc_freedv_1)
self.calculate_snr(self.fsk_ldpc_freedv_1)
# worker for FIFO queue for processing received frames
def worker_transmit(self):
""" """
@ -523,14 +557,14 @@ class RF():
"""
if static.ENABLE_SCATTER:
modemStats = MODEMSTATS()
modemStats = codec2.MODEMSTATS()
self.c_lib.freedv_get_modem_extended_stats.restype = None
self.c_lib.freedv_get_modem_extended_stats(freedv, ctypes.byref(modemStats))
scatterdata = []
scatterdata_small = []
for i in range(MODEM_STATS_NC_MAX):
for j in range(MODEM_STATS_NR_MAX):
for i in range(codec2.MODEM_STATS_NC_MAX):
for j in range(codec2.MODEM_STATS_NR_MAX):
# check if odd or not to get every 2nd item for x
if (j % 2) == 0:
xsymbols = round(modemStats.rx_symbols[i][j]/1000)
@ -558,15 +592,16 @@ class RF():
"""
modem_stats_snr = c_float()
modem_stats_sync = c_int()
self.c_lib.freedv_get_modem_stats(freedv, byref(modem_stats_sync), byref(modem_stats_snr))
modem_stats_snr = modem_stats_snr.value
modem_stats_sync = modem_stats_sync.value
try:
modem_stats_snr = c_float()
modem_stats_sync = c_int()
self.c_lib.freedv_get_modem_stats(freedv, byref(modem_stats_sync), byref(modem_stats_snr))
modem_stats_snr = modem_stats_snr.value
modem_stats_sync = modem_stats_sync.value
snr = round(modem_stats_snr, 1)
print(snr)
static.SNR = np.clip(snr, 0, 255) #limit to max value of 255
return static.SNR
except:
@ -662,20 +697,26 @@ class RF():
"""
codec2.api.freedv_set_frames_per_burst(self.datac1_freedv,n_frames_per_burst)
codec2.api.freedv_set_frames_per_burst(self.datac3_freedv,n_frames_per_burst)
codec2.api.freedv_set_frames_per_burst(self.fsk_ldpc_freedv,n_frames_per_burst)
codec2.api.freedv_set_frames_per_burst(self.fsk_ldpc_freedv_0,n_frames_per_burst)
def get_bytes_per_frame(mode):
"""
provide bytes per frame information for accessing from data handler
Args:
mode:
Returns:
"""
freedv = cast(codec2.api.freedv_open(mode), c_void_p)
if mode == 200:
freedv = cast(codec2.api.freedv_open_advanced(codec2.api.FREEDV_MODE_FSK_LDPC, ctypes.byref(codec2.api.FREEDV_MODE_FSK_LDPC_0_ADV)), c_void_p)
elif mode == 201:
freedv = cast(codec2.api.freedv_open_advanced(codec2.api.FREEDV_MODE_FSK_LDPC, ctypes.byref(codec2.api.FREEDV_MODE_FSK_LDPC_1_ADV)), c_void_p)
else:
freedv = cast(codec2.api.freedv_open(mode), c_void_p)
# get number of bytes per frame for mode
return int(codec2.api.freedv_get_bits_per_modem_frame(freedv)/8)
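# usage sketch (illustrative): the new pseudo mode ids select the advanced FSK configurations,
# while every other id still goes through the plain codec2 freedv_open() call
#
#   get_bytes_per_frame(200)   # FSK_LDPC_0, opened via FREEDV_MODE_FSK_LDPC_0_ADV
#   get_bytes_per_frame(201)   # FSK_LDPC_1, opened via FREEDV_MODE_FSK_LDPC_1_ADV
#   get_bytes_per_frame(14)    # datac0, opened via freedv_open(14)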