St0nedB commited on
Commit
cbe37da
1 Parent(s): 553e655

testing externalizing texts

Browse files
Files changed (3) hide show
  1. app.py +9 -0
  2. requirements.txt +1 -0
  3. texts.toml +41 -0
app.py CHANGED
@@ -1,6 +1,8 @@
1
  import os
2
  import sys
3
  import subprocess
 
 
4
  import numpy as np
5
  import logging
6
  import gradio as gr
@@ -34,6 +36,9 @@ DATA = np.empty((len(SNRS), N, *DATA_SHAPE), dtype=np.complex128)
34
  TRUTH = np.empty((len(SNRS), N, *ETA_SHAPE))
35
  ESTIM = np.empty((len(SNRS), N, *ETA_SHAPE))
36
 
 
 
 
37
  def install_deepest():
38
  git_token = os.environ["GIT_TOKEN"]
39
  git_url = os.environ["GIT_URL"]
@@ -174,6 +179,10 @@ def demo():
174
  """
175
  )
176
 
 
 
 
 
177
  demo.launch()
178
 
179
  def main():
 
1
  import os
2
  import sys
3
  import subprocess
4
+ import toml
5
+ from argparse import Namespace
6
  import numpy as np
7
  import logging
8
  import gradio as gr
 
36
  TRUTH = np.empty((len(SNRS), N, *ETA_SHAPE))
37
  ESTIM = np.empty((len(SNRS), N, *ETA_SHAPE))
38
 
39
+ # load texts
40
+ TEXTS = Namespace(**toml.load("texts.toml"))
41
+
42
  def install_deepest():
43
  git_token = os.environ["GIT_TOKEN"]
44
  git_url = os.environ["GIT_URL"]
 
179
  """
180
  )
181
 
182
+ gr.Markdown(
183
+ TEXTS.contact
184
+ )
185
+
186
  demo.launch()
187
 
188
  def main():
requirements.txt CHANGED
@@ -1,3 +1,4 @@
 
1
  datetime
2
  requests
3
  toml
 
1
+ argparse
2
  datetime
3
  requests
4
  toml
texts.toml ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ introduction = """
2
+ # deepest
3
+ **deepest** (short for **deep** learning parameter **est**imator) is a CNN trained to perform signal parameter estimation.
4
+ The corresponding paper can be found on [arxiv](https://arxiv.org/abs/2211.04846).
5
+
6
+ This applet lets you explore `deepest` with data from the validation set.
7
+ You can also upload your own data and see how it works for your signals.
8
+ """
9
+
10
+ try_your_own = """
11
+ ## Try with your own data.
12
+ Good news everyone! If you want to try `deepest` with your own data, here is your chance.
13
+ After all, there is no need to believe a paper making vague claims about an algorithm's performance.
14
+ But keep in mind that slight deviations from the training-data distribution might throw off `deepest`.
15
+ It's a Neural Network after all.
16
+ You can upload a `numpy` file (both `*.npy` and `*.npz` work) with your test data.
17
+ Ensure the data meets the requirements, such that you get good results.
18
+
19
+ ### Requirements
20
+ - complex-valued baseband data for the time-variant Channel transfer function $H(f,t)$ (e.g. from a channel-sounding campaign)
21
+ - array shape must be `batch_size x f_bins x t_bins`
22
+ - ideally `f_bins`=64 and `t_bins`=64, otherwise the data will be downsampled by the 2D-DFT, which might not be ideal in all scenarios.
23
+
24
+ **Important** This demo runs on Huggingface. You are responsible for the data you upload. Do not upload any data that is confidential or unsuitable in this context.
25
+ """
26
+
27
+ acknowledgements = """
28
+ ## Acknowledgements
29
+ The authors acknowledge the financial support by the Federal Ministry of Education and Research of Germany in the project “Open6GHub” (grant number: 16KISK015).
30
+
31
+ The authors give special thanks to Henning Schwanbeck (HPC team leader) of the TU Ilmenau Computer Center for his valuable support.
32
+ """
33
+
34
+ contact = """
35
+ ## Contact
36
+ If you have technical or scientific questions or encounter any issues in the use of this applet, please let me know.
37
+ You can either
38
+ - write me an email at [steffen.schieler@tu-ilmenau.de](mailto:steffen.schieler@tu-ilmenau.de)
39
+ - or start a new discussion in the [Community Tab](https://huggingface.co/spaces/EMS-TU-Ilmenau/deepest-demo/discussions)
40
+
41
+ """