@@ -72,7 +72,9 @@ attrs==25.3.0
 audioread==3.0.1
     # via librosa
 av==14.3.0
-    # via qwen-vl-utils
+    # via
+    #   flashtts
+    #   qwen-vl-utils
 backoff==2.2.1
     # via posthog
 blake3==1.0.4
@@ -181,14 +183,15 @@ editdistance==0.8.1
     #   funasr
 einops==0.8.1
     # via
+    #   flashtts
     #   infinity-emb
     #   lmdeploy
     #   modelscope
     #   sglang
     #   snac
     #   vllm
 einx==0.3.0
-    # via gpt-server (pyproject.toml)
+    # via flashtts
 email-validator==2.2.0
     # via fastapi
 evalscope==0.10.1
@@ -205,6 +208,7 @@ fastapi==0.115.0
     # via
     #   gpt-server (pyproject.toml)
     #   evalscope
+    #   flashtts
     #   fschat
     #   infinity-emb
     #   lmdeploy
@@ -230,6 +234,8 @@ flashinfer-python==0.2.3+cu124torch2.5
     # via
     #   gpt-server (pyproject.toml)
     #   sglang
+flashtts==0.1.0
+    # via gpt-server (pyproject.toml)
 flatbuffers==25.2.10
     # via onnxruntime
 fonttools==4.57.0
@@ -265,24 +271,25 @@ googleapis-common-protos==1.70.0
     #   opentelemetry-exporter-otlp-proto-http
 grpcio==1.71.0
     # via opentelemetry-exporter-otlp-proto-grpc
-h11==0.14.0
+h11==0.16.0
     # via
     #   httpcore
     #   uvicorn
 hf-transfer==0.1.9
     # via
     #   infinity-emb
     #   sglang
-hf-xet==1.0.3
+hf-xet==1.0.4
     # via huggingface-hub
-httpcore==1.0.8
+httpcore==1.0.9
     # via httpx
 httptools==0.6.4
     # via uvicorn
 httpx==0.27.2
     # via
     #   anthropic
     #   fastapi
+    #   flashtts
     #   fschat
     #   litellm
     #   openai
@@ -317,6 +324,8 @@ importlib-metadata==8.0.0
     #   litellm
     #   opentelemetry-api
     #   vllm
+importlib-resources==6.5.2
+    # via wetextprocessing
 infinity-emb==0.0.73
     # via gpt-server (pyproject.toml)
 interegular==0.3.3
@@ -382,10 +391,12 @@ latex2mathml==3.77.0
 lazy-loader==0.4
     # via librosa
 librosa==0.11.0
-    # via funasr
+    # via
+    #   flashtts
+    #   funasr
 litellm==1.60.0
     # via sglang
-llguidance==0.7.18
+llguidance==0.7.19
     # via
     #   sglang
     #   vllm
@@ -565,7 +576,9 @@ nvidia-nvjitlink-cu12==12.4.127
 nvidia-nvtx-cu12==12.4.127
     # via torch
 omegaconf==2.3.0
-    # via hydra-core
+    # via
+    #   flashtts
+    #   hydra-core
 onnx==1.17.0
     # via optimum
 onnxruntime==1.21.1
@@ -579,7 +592,7 @@ openai==1.55.3
     #   sglang
     #   vllm
 openai-whisper==20240930
-    # via gpt-server (pyproject.toml)
+    # via flashtts
 opencv-python-headless==4.11.0.86
     # via
     #   mistral-common
@@ -701,7 +714,7 @@ pooch==1.8.2
     # via librosa
 portalocker==3.1.1
     # via sacrebleu
-posthog==3.25.0
+posthog==4.0.0
     # via infinity-emb
 prometheus-client==0.21.1
     # via
@@ -720,7 +733,7 @@ propcache==0.3.1
     # via
     #   aiohttp
     #   yarl
-protobuf==4.25.6
+protobuf==4.25.7
     # via
     #   googleapis-common-protos
     #   lmdeploy
@@ -781,9 +794,11 @@ pygments==2.19.1
     #   markdown2
     #   rich
 pyloudnorm==0.1.1
-    # via gpt-server (pyproject.toml)
+    # via flashtts
 pympler==1.1
     # via evalscope
+pynini==2.1.6
+    # via wetextprocessing
 pynndescent==0.5.13
     # via umap-learn
 pynvml==12.0.0
@@ -807,6 +822,7 @@ python-json-logger==3.3.0
 python-multipart==0.0.20
     # via
     #   fastapi
+    #   flashtts
     #   sglang
 pytorch-wpe==0.0.1
     # via funasr
@@ -887,7 +903,7 @@ rich==13.9.4
     #   rich-toolkit
     #   streamlit
     #   typer
-rich-toolkit==0.14.1
+rich-toolkit==0.14.3
     # via fastapi-cli
 rouge-chinese==1.0.3
     # via evalscope
@@ -974,7 +990,7 @@ six==1.17.0
 smmap==5.0.2
     # via gitdb
 snac==1.2.1
-    # via gpt-server (pyproject.toml)
+    # via flashtts
 sniffio==1.3.1
     # via
     #   anthropic
@@ -986,12 +1002,15 @@ sortedcontainers==2.4.0
 soundfile==0.13.1
     # via
     #   --override (workspace)
+    #   flashtts
     #   funasr
     #   infinity-emb
     #   librosa
     #   sglang
 soxr==0.5.0.post1
-    # via librosa
+    # via
+    #   flashtts
+    #   librosa
 srt==3.5.3
     # via edge-tts
 sse-starlette==2.1.3
@@ -1119,6 +1138,7 @@ transformers==4.50.0
     #   colpali-engine
     #   compressed-tensors
     #   evalscope
+    #   flashtts
     #   lmdeploy
     #   modelscope
     #   optimum
@@ -1183,6 +1203,7 @@ uvicorn==0.32.1
     # via
     #   fastapi
     #   fastapi-cli
+    #   flashtts
     #   fschat
     #   infinity-emb
     #   lmdeploy
@@ -1206,6 +1227,8 @@ wcwidth==0.2.13
     # via prompt-toolkit
 websockets==15.0.1
     # via uvicorn
+wetextprocessing==1.0.4.1
+    # via flashtts
 wrapt==1.17.2
     # via deprecated
 xformers==0.0.29.post2