Skip to content

Commit 126fb3a

Browse files
committed
fix: cleanup
1 parent 2d04794 commit 126fb3a

File tree

2 files changed

+3
-9
lines changed

2 files changed

+3
-9
lines changed

nodes/audio_utils/save_audio_tensor.py

-4
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
1-
import queue
2-
import logging
31
from comfystream import tensor_cache
42

5-
logger = logging.getLogger(__name__)
6-
73
class SaveAudioTensor:
84
CATEGORY = "audio_utils"
95
RETURN_TYPES = ()

server/pipeline.py

+3-5
Original file line numberDiff line numberDiff line change
@@ -11,16 +11,14 @@
1111

1212
class Pipeline:
1313
def __init__(self, **kwargs):
14-
self.client = ComfyStreamClient(**kwargs, max_workers=5) # hardcoded max workers
14+
self.client = ComfyStreamClient(**kwargs, max_workers=5) # TODO: hardcoded max workers, should it be configurable?
1515

1616
self.video_futures = asyncio.Queue()
1717
self.audio_futures = asyncio.Queue()
18-
19-
self.resampler = av.audio.resampler.AudioResampler(format='s16', layout='mono', rate=48000) # find a better way to convert to mono
2018

2119
async def warm(self):
2220
dummy_video_inp = torch.randn(1, 512, 512, 3)
23-
dummy_audio_inp = np.random.randint(-32768, 32767, 48 * 20, dtype=np.int16) # has to be more than the buffer size in comfy workflow
21+
dummy_audio_inp = np.random.randint(-32768, 32767, 48 * 20, dtype=np.int16) # TODO: might affect the workflow, due to buffering
2422

2523
for _ in range(WARMUP_RUNS):
2624
image_out_fut = self.client.put_video_input(dummy_video_inp)
@@ -54,7 +52,7 @@ def video_preprocess(self, frame: av.VideoFrame) -> torch.Tensor:
5452
return torch.from_numpy(frame_np).unsqueeze(0)
5553

5654
def audio_preprocess(self, frame: av.AudioFrame) -> torch.Tensor:
57-
return self.resampler.resample(frame)[0].to_ndarray().flatten()
55+
return frame.to_ndarray().ravel().reshape(-1, 2).mean(axis=1).astype(np.int16)
5856

5957
def video_postprocess(self, output: torch.Tensor) -> av.VideoFrame:
6058
return av.VideoFrame.from_ndarray(

0 commit comments

Comments (0)