]> code.octet-stream.net Git - m17rt/blob - m17codec2/src/lib.rs
Add metadata to m17rt-mod Cargo.toml
[m17rt] / m17codec2 / src / lib.rs
1 use codec2::{Codec2, Codec2Mode};
2 use cpal::traits::DeviceTrait;
3 use cpal::traits::HostTrait;
4 use cpal::traits::StreamTrait;
5 use cpal::{Sample, SampleFormat, SampleRate};
6 use log::debug;
7 use m17app::adapter::StreamAdapter;
8 use m17app::app::TxHandle;
9 use m17core::address::Address;
10 use m17core::address::Callsign;
11 use m17core::protocol::LsfFrame;
12 use m17core::protocol::StreamFrame;
13 use std::collections::VecDeque;
14 use std::fs::File;
15 use std::io::Write;
16 use std::path::Path;
17 use std::path::PathBuf;
18 use std::sync::{
19 mpsc::{channel, Receiver, Sender},
20 Arc, Mutex,
21 };
22 use std::time::Duration;
23 use std::time::Instant;
24
25 pub fn decode_codec2<P: AsRef<Path>>(data: &[u8], out_path: P) -> Vec<i16> {
26 let codec2 = Codec2::new(Codec2Mode::MODE_3200);
27 let var_name = codec2;
28 let mut codec = var_name;
29 let mut all_samples: Vec<i16> = vec![];
30 for i in 0..(data.len() / 8) {
31 let mut samples = vec![0; codec.samples_per_frame()];
32 codec.decode(&mut samples, &data[i * 8..((i + 1) * 8)]);
33 all_samples.append(&mut samples);
34 }
35
36 // dude this works
37 let mut speech_out = File::create(out_path).unwrap();
38 for b in &all_samples {
39 speech_out.write_all(&b.to_le_bytes()).unwrap();
40 }
41 all_samples
42 }
43
/// Stream adapter that decodes incoming M17 voice streams with Codec2 and
/// plays the resulting audio on a local sound card via cpal.
pub struct Codec2Adapter {
    // Decoder/playback state shared with the audio playback thread
    state: Arc<Mutex<AdapterState>>,
    // TODO: make this configurable
    // Name of the cpal output device used for playback
    output_card: String,
}
49
50 impl Codec2Adapter {
51 pub fn new() -> Self {
52 Self {
53 state: Arc::new(Mutex::new(AdapterState {
54 tx: None,
55 out_buf: VecDeque::new(),
56 codec2: Codec2::new(Codec2Mode::MODE_3200),
57 end_tx: None,
58 })),
59 // TODO: this doesn't work on rpi. Use default_output_device() by default
60 output_card: "default".to_owned(),
61 }
62 }
63 }
64
/// State shared between the adapter callbacks and the playback thread.
struct AdapterState {
    // Handle for transmitting; set when the adapter is registered, cleared on removal
    tx: Option<TxHandle>,
    /// Circular buffer of output samples for playback
    out_buf: VecDeque<i16>,
    // Decoder for the currently-active incoming stream; reset on stream_began
    codec2: Codec2,
    // Dropping this sender unblocks the playback thread's recv() so it can exit
    end_tx: Option<Sender<()>>,
}
72
impl StreamAdapter for Codec2Adapter {
    /// Store the TX handle and spawn the playback thread for received audio.
    fn adapter_registered(&self, _id: usize, handle: TxHandle) {
        self.state.lock().unwrap().tx = Some(handle);

        let (end_tx, end_rx) = channel();
        let state = self.state.clone();
        let output_card = self.output_card.clone();
        // The cpal stream is !Send, so it is created and owned by its own thread
        std::thread::spawn(move || stream_thread(end_rx, state, output_card));
        self.state.lock().unwrap().end_tx = Some(end_tx);
    }

    /// Drop the TX handle and the end-channel sender; dropping the sender
    /// makes `stream_thread`'s `recv()` return so the playback thread exits.
    fn adapter_removed(&self) {
        let mut state = self.state.lock().unwrap();
        state.tx = None;
        state.end_tx = None;
    }

    fn tnc_started(&self) {}

    fn tnc_closed(&self) {}

    /// A new incoming stream started: reset the decoder to a fresh 3200-mode state.
    fn stream_began(&self, _lsf: LsfFrame) {
        // for now we will assume:
        // - unencrypted
        // - data type is Voice (Codec2 3200), not Voice+Data
        // TODO: is encryption handled here or in M17App, such that we get a decrypted stream?
        // TODO: handle the Voice+Data combination with Codec2 1600
        self.state.lock().unwrap().codec2 = Codec2::new(Codec2Mode::MODE_3200);
    }

    /// Decode one 16-byte stream payload (two 8-byte Codec2 frames) and queue
    /// the samples for playback; frames are dropped when the queue is full.
    fn stream_data(&self, _frame_number: u16, _is_final: bool, data: Arc<[u8; 16]>) {
        let mut state = self.state.lock().unwrap();
        for encoded in data.chunks(8) {
            // Cap the queue so a stalled output device can't grow it unboundedly
            if state.out_buf.len() < 1024 {
                let mut samples = [i16::EQUILIBRIUM; 160]; // while assuming 3200
                state.codec2.decode(&mut samples, encoded);
                // TODO: maybe get rid of VecDeque so we can decode directly into ring buffer?
                for s in samples {
                    state.out_buf.push_back(s);
                }
            } else {
                debug!("out_buf overflow");
            }
        }
    }
}
119
120 fn output_cb(data: &mut [i16], state: &Mutex<AdapterState>) {
121 let mut state = state.lock().unwrap();
122 for d in data {
123 *d = state.out_buf.pop_front().unwrap_or(i16::EQUILIBRIUM);
124 }
125 }
126
127 /// Create and manage the stream from a dedicated thread since it's `!Send`
128 fn stream_thread(end: Receiver<()>, state: Arc<Mutex<AdapterState>>, output_card: String) {
129 let host = cpal::default_host();
130 let device = host
131 .output_devices()
132 .unwrap()
133 .find(|d| d.name().unwrap() == output_card)
134 .unwrap();
135 let mut configs = device.supported_output_configs().unwrap();
136 // TODO: channels == 1 doesn't work on a Raspberry Pi
137 // make this configurable and support interleaving LRLR stereo samples if using 2 channels
138 let config = configs
139 .find(|c| c.channels() == 1 && c.sample_format() == SampleFormat::I16)
140 .unwrap()
141 .with_sample_rate(SampleRate(8000));
142 let stream = device
143 .build_output_stream(
144 &config.into(),
145 move |data: &mut [i16], info: &cpal::OutputCallbackInfo| {
146 output_cb(data, &state);
147 },
148 |e| {
149 // trigger end_tx here? always more edge cases
150 debug!("error occurred in codec2 playback: {e:?}");
151 },
152 None,
153 )
154 .unwrap();
155 stream.play().unwrap();
156 let _ = end.recv();
157 // it seems concrete impls of Stream have a Drop implementation that will handle termination
158 }
159
/// Utility for transmitting a local WAV file as an M17 voice stream.
pub struct WavePlayer;
161
impl WavePlayer {
    /// Encode the WAV file at `path` with Codec2 MODE_3200 and transmit it
    /// as an M17 voice stream on `tx`, pacing one 40 ms frame per tick.
    ///
    /// Assumes the WAV contains samples suitable for Codec2 3200 (8 kHz mono
    /// 16-bit) — TODO confirm; no resampling or channel conversion is done.
    ///
    /// # Panics
    /// Panics if the file cannot be opened as a WAV.
    pub fn play(path: PathBuf, tx: TxHandle) {
        let mut reader = hound::WavReader::open(path).unwrap();
        let mut samples = reader.samples::<i16>();

        let mut codec = Codec2::new(Codec2Mode::MODE_3200);
        // 160 input samples encode to 8 bytes; two codec frames fill one 16-byte payload
        let mut in_buf = [0i16; 160];
        let mut out_buf = [0u8; 16];
        // Which 5-byte chunk of the LSF goes into the next frame's LICH (cycles 0..=5)
        let mut lsf_chunk: usize = 0;
        // One stream frame carries 40 ms of audio
        const TICK: Duration = Duration::from_millis(40);
        let mut next_tick = Instant::now() + TICK;
        // NOTE(review): inferred as u16 from StreamFrame; overflows on very long
        // files (~43 min) — confirm wrap behavior is acceptable upstream
        let mut frame_number = 0;

        // TODO: need a better way to create addresses from std strings

        // NOTE(review): callsign literal is presumably space-padded to the full
        // Callsign field width — verify the padding wasn't lost
        let lsf = LsfFrame::new_voice(
            &Address::Callsign(Callsign(b"VK7XT ".clone())),
            &Address::Broadcast,
        );

        tx.transmit_stream_start(lsf.clone());

        loop {
            let mut last_one = false;
            // Fill both 8-byte halves of the payload with encoded audio
            for mut out in out_buf.chunks_mut(8) {
                for i in 0..160 {
                    let sample = match samples.next() {
                        Some(Ok(sample)) => sample,
                        // Out of samples (or a read error): pad with silence
                        // and flag this frame as the end of the stream
                        _ => {
                            last_one = true;
                            0
                        }
                    };
                    in_buf[i] = sample;
                }
                codec.encode(&mut out, &in_buf);
            }
            tx.transmit_stream_next(StreamFrame {
                lich_idx: lsf_chunk as u8,
                // Rotate through the six 5-byte LSF chunks for the LICH
                lich_part: lsf.0[lsf_chunk * 5..(lsf_chunk + 1) * 5]
                    .try_into()
                    .unwrap(),
                frame_number,
                end_of_stream: last_one,
                stream_data: out_buf.clone(),
            });
            frame_number += 1;
            lsf_chunk = (lsf_chunk + 1) % 6;

            if last_one {
                break;
            }

            // Pace transmission in real time: sleep until the next 40 ms boundary
            std::thread::sleep(next_tick.duration_since(Instant::now()));
            next_tick += TICK;
        }
    }
}
219 }