+#![doc = include_str!("../README.md")]
+
use codec2::{Codec2, Codec2Mode};
use cpal::traits::DeviceTrait;
use cpal::traits::HostTrait;
use log::debug;
use m17app::adapter::StreamAdapter;
use m17app::app::TxHandle;
-use m17core::protocol::LsfFrame;
+use m17app::link_setup::LinkSetup;
+use m17app::link_setup::M17Address;
+use m17app::StreamFrame;
use std::collections::VecDeque;
use std::fs::File;
use std::io::Write;
use std::path::Path;
+use std::path::PathBuf;
use std::sync::{
mpsc::{channel, Receiver, Sender},
Arc, Mutex,
};
+use std::time::Duration;
+use std::time::Instant;
pub fn decode_codec2<P: AsRef<Path>>(data: &[u8], out_path: P) -> Vec<i16> {
let codec2 = Codec2::new(Codec2Mode::MODE_3200);
all_samples
}
/// Subscribes to M17 streams and attempts to play the decoded Codec2 audio
pub struct Codec2Adapter {
state: Arc<Mutex<AdapterState>>,
// TODO: make this configurable
codec2: Codec2::new(Codec2Mode::MODE_3200),
end_tx: None,
})),
+ // TODO: this doesn't work on rpi. Use default_output_device() by default
output_card: "default".to_owned(),
}
}
}
+impl Default for Codec2Adapter {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
struct AdapterState {
tx: Option<TxHandle>,
/// Circular buffer of output samples for playback
fn tnc_closed(&self) {}
- fn stream_began(&self, lsf: LsfFrame) {
+ fn stream_began(&self, _link_setup: LinkSetup) {
// for now we will assume:
// - unencrypted
// - data type is Voice (Codec2 3200), not Voice+Data
self.state.lock().unwrap().codec2 = Codec2::new(Codec2Mode::MODE_3200);
}
- fn stream_data(&self, frame_number: u16, is_final: bool, data: Arc<[u8; 16]>) {
+ fn stream_data(&self, _frame_number: u16, _is_final: bool, data: Arc<[u8; 16]>) {
let mut state = self.state.lock().unwrap();
for encoded in data.chunks(8) {
if state.out_buf.len() < 1024 {
fn output_cb(data: &mut [i16], state: &Mutex<AdapterState>) {
let mut state = state.lock().unwrap();
- debug!(
- "sound card wants {} samples, we have {} in the buffer",
- data.len(),
- state.out_buf.len()
- );
for d in data {
*d = state.out_buf.pop_front().unwrap_or(i16::EQUILIBRIUM);
}
.find(|d| d.name().unwrap() == output_card)
.unwrap();
let mut configs = device.supported_output_configs().unwrap();
+ // TODO: channels == 1 doesn't work on a Raspberry Pi
+ // make this configurable and support interleaving LRLR stereo samples if using 2 channels
let config = configs
.find(|c| c.channels() == 1 && c.sample_format() == SampleFormat::I16)
.unwrap()
let stream = device
.build_output_stream(
&config.into(),
- move |data: &mut [i16], info: &cpal::OutputCallbackInfo| {
- debug!(
- "callback {:?} playback {:?}",
- info.timestamp().callback,
- info.timestamp().playback
- );
+ move |data: &mut [i16], _info: &cpal::OutputCallbackInfo| {
output_cb(data, &state);
},
|e| {
let _ = end.recv();
// it seems concrete impls of Stream have a Drop implementation that will handle termination
}
+
/// Transmits a wave file as an M17 voice stream.
///
/// See [`WavePlayer::play`] for the expected wave format and usage.
pub struct WavePlayer;
+
+impl WavePlayer {
+ /// Plays a wave file (blocking).
+ ///
+ /// * `path`: wave file to transmit, must be 8 kHz mono and 16-bit LE
+ /// * `tx`: a `TxHandle` obtained from an `M17App`
+ /// * `source`: address of transmission source
+ /// * `destination`: address of transmission destination
+ /// * `channel_access_number`: from 0 to 15, usually 0
+ pub fn play(
+ path: PathBuf,
+ tx: TxHandle,
+ source: &M17Address,
+ destination: &M17Address,
+ channel_access_number: u8,
+ ) {
+ let mut reader = hound::WavReader::open(path).unwrap();
+ let mut samples = reader.samples::<i16>();
+
+ let mut codec = Codec2::new(Codec2Mode::MODE_3200);
+ let mut in_buf = [0i16; 160];
+ let mut out_buf = [0u8; 16];
+ let mut lsf_chunk: usize = 0;
+ const TICK: Duration = Duration::from_millis(40);
+ let mut next_tick = Instant::now() + TICK;
+ let mut frame_number = 0;
+
+ let mut setup = LinkSetup::new_voice(source, destination);
+ setup.set_channel_access_number(channel_access_number);
+ tx.transmit_stream_start(&setup);
+
+ loop {
+ let mut last_one = false;
+ for out in out_buf.chunks_mut(8) {
+ for i in in_buf.iter_mut() {
+ let sample = match samples.next() {
+ Some(Ok(sample)) => sample,
+ _ => {
+ last_one = true;
+ 0
+ }
+ };
+ *i = sample;
+ }
+ codec.encode(out, &in_buf);
+ }
+ tx.transmit_stream_next(&StreamFrame {
+ lich_idx: lsf_chunk as u8,
+ lich_part: setup.lich_part(lsf_chunk as u8),
+ frame_number,
+ end_of_stream: last_one,
+ stream_data: out_buf,
+ });
+ frame_number += 1;
+ lsf_chunk = (lsf_chunk + 1) % 6;
+
+ if last_one {
+ break;
+ }
+
+ std::thread::sleep(next_tick.duration_since(Instant::now()));
+ next_tick += TICK;
+ }
+ }
+}