]> code.octet-stream.net Git - m17rt/blob - m17codec2/src/lib.rs
1d05f2720f827ac23ed982c6f243b9b06486232e
[m17rt] / m17codec2 / src / lib.rs
use codec2::{Codec2, Codec2Mode};
use cpal::traits::DeviceTrait;
use cpal::traits::HostTrait;
use cpal::traits::StreamTrait;
use cpal::{Sample, SampleFormat, SampleRate};
use log::debug;
use m17app::adapter::StreamAdapter;
use m17app::app::TxHandle;
use m17app::link_setup::LinkSetup;
use m17app::link_setup::M17Address;
use m17app::StreamFrame;
use std::collections::VecDeque;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::path::Path;
use std::path::PathBuf;
use std::sync::{
    mpsc::{channel, Receiver, Sender},
    Arc, Mutex,
};
use std::time::Duration;
use std::time::Instant;
23
24 pub fn decode_codec2<P: AsRef<Path>>(data: &[u8], out_path: P) -> Vec<i16> {
25 let codec2 = Codec2::new(Codec2Mode::MODE_3200);
26 let var_name = codec2;
27 let mut codec = var_name;
28 let mut all_samples: Vec<i16> = vec![];
29 for i in 0..(data.len() / 8) {
30 let mut samples = vec![0; codec.samples_per_frame()];
31 codec.decode(&mut samples, &data[i * 8..((i + 1) * 8)]);
32 all_samples.append(&mut samples);
33 }
34
35 // dude this works
36 let mut speech_out = File::create(out_path).unwrap();
37 for b in &all_samples {
38 speech_out.write_all(&b.to_le_bytes()).unwrap();
39 }
40 all_samples
41 }
42
/// A stream adapter that decodes received M17 voice frames with Codec2
/// (MODE_3200) and plays the resulting audio on a local output device.
pub struct Codec2Adapter {
    /// Decoder state and playback buffer, shared with the playback thread.
    state: Arc<Mutex<AdapterState>>,
    // TODO: make this configurable
    /// Name of the cpal output device used for playback.
    output_card: String,
}
48
49 impl Codec2Adapter {
50 pub fn new() -> Self {
51 Self {
52 state: Arc::new(Mutex::new(AdapterState {
53 tx: None,
54 out_buf: VecDeque::new(),
55 codec2: Codec2::new(Codec2Mode::MODE_3200),
56 end_tx: None,
57 })),
58 // TODO: this doesn't work on rpi. Use default_output_device() by default
59 output_card: "default".to_owned(),
60 }
61 }
62 }
63
impl Default for Codec2Adapter {
    /// Equivalent to [`Codec2Adapter::new`].
    fn default() -> Self {
        Self::new()
    }
}
69
/// Shared state between the adapter callbacks and the audio playback thread.
struct AdapterState {
    /// Handle for transmitting; set while the adapter is registered.
    tx: Option<TxHandle>,
    /// Circular buffer of output samples for playback
    out_buf: VecDeque<i16>,
    /// Decoder for the current stream; recreated when a new stream begins.
    codec2: Codec2,
    /// Dropping this sender signals the playback thread to terminate.
    end_tx: Option<Sender<()>>,
}
77
78 impl StreamAdapter for Codec2Adapter {
79 fn adapter_registered(&self, _id: usize, handle: TxHandle) {
80 self.state.lock().unwrap().tx = Some(handle);
81
82 let (end_tx, end_rx) = channel();
83 let state = self.state.clone();
84 let output_card = self.output_card.clone();
85 std::thread::spawn(move || stream_thread(end_rx, state, output_card));
86 self.state.lock().unwrap().end_tx = Some(end_tx);
87 }
88
89 fn adapter_removed(&self) {
90 let mut state = self.state.lock().unwrap();
91 state.tx = None;
92 state.end_tx = None;
93 }
94
95 fn tnc_started(&self) {}
96
97 fn tnc_closed(&self) {}
98
99 fn stream_began(&self, _link_setup: LinkSetup) {
100 // for now we will assume:
101 // - unencrypted
102 // - data type is Voice (Codec2 3200), not Voice+Data
103 // TODO: is encryption handled here or in M17App, such that we get a decrypted stream?
104 // TODO: handle the Voice+Data combination with Codec2 1600
105 self.state.lock().unwrap().codec2 = Codec2::new(Codec2Mode::MODE_3200);
106 }
107
108 fn stream_data(&self, _frame_number: u16, _is_final: bool, data: Arc<[u8; 16]>) {
109 let mut state = self.state.lock().unwrap();
110 for encoded in data.chunks(8) {
111 if state.out_buf.len() < 1024 {
112 let mut samples = [i16::EQUILIBRIUM; 160]; // while assuming 3200
113 state.codec2.decode(&mut samples, encoded);
114 // TODO: maybe get rid of VecDeque so we can decode directly into ring buffer?
115 for s in samples {
116 state.out_buf.push_back(s);
117 }
118 } else {
119 debug!("out_buf overflow");
120 }
121 }
122 }
123 }
124
125 fn output_cb(data: &mut [i16], state: &Mutex<AdapterState>) {
126 let mut state = state.lock().unwrap();
127 for d in data {
128 *d = state.out_buf.pop_front().unwrap_or(i16::EQUILIBRIUM);
129 }
130 }
131
/// Create and manage the stream from a dedicated thread since it's `!Send`
///
/// Blocks until a message arrives on `end` or its sender is dropped, then
/// returns, dropping (and thereby stopping) the cpal stream.
///
/// # Panics
/// Panics if `output_card` does not name an available output device, or if no
/// supported mono/i16 output config exists, or if stream creation/playback fails.
fn stream_thread(end: Receiver<()>, state: Arc<Mutex<AdapterState>>, output_card: String) {
    let host = cpal::default_host();
    // Select the output device by name rather than taking the default device.
    let device = host
        .output_devices()
        .unwrap()
        .find(|d| d.name().unwrap() == output_card)
        .unwrap();
    let mut configs = device.supported_output_configs().unwrap();
    // TODO: channels == 1 doesn't work on a Raspberry Pi
    // make this configurable and support interleaving LRLR stereo samples if using 2 channels
    // Codec2 3200 produces 8 kHz mono i16, so request a matching config.
    let config = configs
        .find(|c| c.channels() == 1 && c.sample_format() == SampleFormat::I16)
        .unwrap()
        .with_sample_rate(SampleRate(8000));
    let stream = device
        .build_output_stream(
            &config.into(),
            move |data: &mut [i16], _info: &cpal::OutputCallbackInfo| {
                output_cb(data, &state);
            },
            |e| {
                // trigger end_tx here? always more edge cases
                debug!("error occurred in codec2 playback: {e:?}");
            },
            None,
        )
        .unwrap();
    stream.play().unwrap();
    let _ = end.recv();
    // it seems concrete impls of Stream have a Drop implementation that will handle termination
}
164
/// Utility for transmitting the contents of a WAV file as an M17 voice stream.
pub struct WavePlayer;
166
impl WavePlayer {
    /// Transmit a WAV file as an M17 voice stream, blocking until complete.
    ///
    /// Samples are read as `i16`; assumes the file is 8 kHz mono so one frame
    /// equals 40 ms of audio — TODO confirm/enforce the format.
    ///
    /// * `path` - WAV file to transmit
    /// * `tx` - handle used to send the link setup and stream frames
    /// * `source` - source (from) address for the stream
    /// * `destination` - destination (to) address for the stream
    /// * `channel_access_number` - CAN to set in the link setup frame
    ///
    /// # Panics
    /// Panics if the file cannot be opened or parsed as WAV.
    pub fn play(
        path: PathBuf,
        tx: TxHandle,
        source: &M17Address,
        destination: &M17Address,
        channel_access_number: u8,
    ) {
        let mut reader = hound::WavReader::open(path).unwrap();
        let mut samples = reader.samples::<i16>();

        let mut codec = Codec2::new(Codec2Mode::MODE_3200);
        // 160 input samples encode into 8 bytes; each stream frame carries
        // two encoded chunks (16 bytes).
        let mut in_buf = [0i16; 160];
        let mut out_buf = [0u8; 16];
        let mut lsf_chunk: usize = 0;
        // One stream frame is sent every 40 ms to pace the transmission.
        const TICK: Duration = Duration::from_millis(40);
        let mut next_tick = Instant::now() + TICK;
        let mut frame_number = 0;

        let mut setup = LinkSetup::new_voice(source, destination);
        setup.set_channel_access_number(channel_access_number);
        tx.transmit_stream_start(&setup);

        loop {
            let mut last_one = false;
            for out in out_buf.chunks_mut(8) {
                for i in in_buf.iter_mut() {
                    let sample = match samples.next() {
                        Some(Ok(sample)) => sample,
                        _ => {
                            // Out of samples (or a read error): pad the rest of
                            // this frame with silence and end the stream after it.
                            last_one = true;
                            0
                        }
                    };
                    *i = sample;
                }
                codec.encode(out, &in_buf);
            }
            tx.transmit_stream_next(&StreamFrame {
                lich_idx: lsf_chunk as u8,
                lich_part: setup.lich_part(lsf_chunk as u8),
                frame_number,
                end_of_stream: last_one,
                stream_data: out_buf,
            });
            frame_number += 1;
            // The link setup data is spread across 6 LICH chunks, cycled
            // continuously for late joiners.
            lsf_chunk = (lsf_chunk + 1) % 6;

            if last_one {
                break;
            }

            // Sleep until the next 40 ms boundary to transmit in real time.
            std::thread::sleep(next_tick.duration_since(Instant::now()));
            next_tick += TICK;
        }
    }
}