Pendrokar committed
Commit 146bfe1 · 1 Parent(s): 5d11a38

dump cached sample paths to JSON file and load it on Space reset

Files changed (2):
  1. app/sample_caching.py +19 -0
  2. app/synth.py +11 -3
app/sample_caching.py CHANGED

@@ -1,6 +1,7 @@
 import gradio as gr
 import itertools
 import random
+import json
 from typing import List, Tuple, Set, Dict
 from hashlib import md5, sha1
 import spaces
@@ -17,8 +18,26 @@ class Sample:
         self.transcript = transcript
         self.modelName = modelName

+    def to_dict(self):
+        return {
+            'filename': self.filename,
+            'transcript': self.transcript,
+            'modelName': self.modelName,
+        }
+
 # cache audio samples for quick voting
 cached_samples: List[Sample] = []
+# cached_samples.append(Sample("audio1.mp3", "Hello, how are you?", "model1"))
+# cached_samples.append(Sample("audio2.mp3", "Hello, how are you?", "model2"))
+
+# load temporary samples
+json_data = ''
+try:
+    with open("_cached_samples.json", "r") as read:
+        loaded_samples = json.load(read)
+        cached_samples = [Sample(**json_data) for json_data in loaded_samples]
+except:
+    pass

 @spaces.GPU(duration=10)
 def asr_cached_for_dataset():
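Together with the loader above, the new to_dict method gives the cache a simple JSON round-trip: synth.py dumps [s.to_dict() for s in cached_samples], and this module rebuilds the objects with Sample(**...) the next time the Space starts. Below is a minimal standalone sketch of that round-trip; the Sample fields and the _cached_samples.json filename are taken from the diff, while the sample data itself is hypothetical (it mirrors the commented-out examples above).

import json
from typing import List

class Sample:
    def __init__(self, filename, transcript, modelName):
        self.filename = filename
        self.transcript = transcript
        self.modelName = modelName

    def to_dict(self):
        # keys must match the constructor arguments so Sample(**d) works on load
        return {
            'filename': self.filename,
            'transcript': self.transcript,
            'modelName': self.modelName,
        }

# hypothetical cached samples, same shape as in the diff
cached_samples: List[Sample] = [
    Sample("audio1.mp3", "Hello, how are you?", "model1"),
    Sample("audio2.mp3", "Hello, how are you?", "model2"),
]

# dump side (what synth.py now does after caching a sample pair)
with open("_cached_samples.json", "w") as write:
    json.dump([s.to_dict() for s in cached_samples], write)

# load side (what sample_caching.py now does at import time, i.e. after a Space reset)
with open("_cached_samples.json", "r") as read:
    restored = [Sample(**d) for d in json.load(read)]

assert restored[0].filename == "audio1.mp3"
assert restored[1].modelName == "model2"

Because the load runs at module import time, a restarted Space repopulates cached_samples before any new audio is synthesized.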
app/synth.py CHANGED

@@ -288,6 +288,14 @@ def synthandreturn(text, autoplay, request: gr.Request):
            print('Error when trying to cache sample')
            return False

+    # save list to JSON file
+    cached_sample_dict = [cached_sample.to_dict() for cached_sample in cached_samples]
+    try:
+        with open("_cached_samples.json", "w") as write:
+            json.dump( cached_sample_dict , write )
+    except:
+        pass
+
     mdl1k = mdl1
     mdl2k = mdl2
     print(mdl1k, mdl2k)
@@ -331,7 +339,7 @@ def synthandreturn(text, autoplay, request: gr.Request):
     print(f"Retrieving models {mdl1k} and {mdl2k} from API")
     return (
         text,
-        "Synthesize",
+        "Synthesize 🐢",
         gr.update(visible=True), # r2
         mdl1, # model1
         mdl2, # model2
@@ -430,7 +438,7 @@ def synthandreturn_battle(text, mdl1, mdl2, autoplay):
     print(f"Retrieving models {mdl1k} and {mdl2k} from API")
     return (
         text,
-        "Synthesize",
+        "Synthesize 🐢",
         gr.update(visible=True), # r2
         mdl1, # model1
         mdl2, # model2
@@ -450,7 +458,7 @@ def randomsent_battle():
 def clear_stuff():
     return [
         gr.update(visible=True, value="", elem_classes=[]),
-        "Synthesize",
+        "Synthesize 🐢",
         gr.update(visible=False), # r2
         '', # model1
         '', # model2
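The dump block is added inside synthandreturn, right after the sample-caching step, so _cached_samples.json is rewritten each time a new pair is cached. A quick, illustrative way to inspect what gets persisted (filename and keys as in the diff; the file only exists once at least one synthesis has been cached):

import json

# print the cache that synth.py persisted to disk
with open("_cached_samples.json") as fh:
    for entry in json.load(fh):
        # each entry carries the keys produced by Sample.to_dict()
        print(entry["modelName"], entry["filename"], entry["transcript"])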