Commit
In the auto-cut part, at the point where audio segments are written out, the program no longer writes one audio file per chunk — merging those at the end could leave the audio and video out of sync. It now writes one wav file segment per roughly ten minutes of audio; each segment takes at most 403.7 MB of memory, so memory cannot be overwhelmed.
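The 403.7 MB figure is consistent with the buffer holding 44.1 kHz stereo samples as float64 (NumPy's default dtype for np.zeros). A quick back-of-envelope check, with the dtype and channel count assumed rather than taken from the code:

# Rough check of the commit message's 403.7 MB per-segment figure
# (assumes 44.1 kHz stereo samples stored as float64, np.zeros' default dtype)
sample_rate = 44100
seconds = 60 * 10        # flush threshold: ten minutes of audio
channels = 2
bytes_per_sample = 8     # float64
total_bytes = sample_rate * seconds * channels * bytes_per_sample
print(total_bytes / 1024 / 1024)  # ~403.7 (MiB)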
HaujetZhao committed Aug 14, 2020
1 parent 76de8b0 commit bc5f6c8
Showing 1 changed file with 11 additions and 3 deletions.
QuickCut.py — 14 changes: 11 additions & 3 deletions
@@ -5532,10 +5532,14 @@ def run(self):
  lastExistingFrame = None
  i = 0
  concat = open(self.TEMP_FOLDER + "/concat.txt", "a")
- outputAudioData = np.zeros((0, audioData.shape[1]))
+ print('len of chunks: %s' % len(chunks))
+ chunksNumber = len(chunks)
  for chunk in chunks:
      i += 1
+     print(i)
+     # Start an empty array: 0 rows, one column per audio channel (audioData.shape[1])
+     outputAudioData = np.zeros((0, audioData.shape[1]))

      # Take one chunk's interval of audio
      audioChunk = audioData[int(chunk[0] * samplesPerFrame):int(chunk[1] * samplesPerFrame)]

@@ -5572,6 +5576,7 @@ def run(self):
      # Repeat this 1-axis array into a second axis, so it can be applied to two channels
      mask = np.repeat(premask[:, np.newaxis], 2, axis=1)  # make the fade-envelope mask stereo
      # Fade in
+     # print(outputAudioData[0:0 + AUDIO_FADE_ENVELOPE_SIZE])
      outputAudioData[0:0 + AUDIO_FADE_ENVELOPE_SIZE] *= mask
      # Fade out
      outputAudioData[leng - AUDIO_FADE_ENVELOPE_SIZE:leng] *= 1 - mask
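As an aside, a minimal self-contained sketch of the fade-envelope idea this hunk's comments describe — the envelope length and the sample array here are illustrative stand-ins, not values taken from QuickCut.py:

import numpy as np

AUDIO_FADE_ENVELOPE_SIZE = 400          # illustrative fade length in samples
chunk = np.ones((44100, 2))             # stand-in for one second of stereo audio

# Linear ramp from 0 to 1, repeated into a second axis so it covers both channels
premask = np.arange(AUDIO_FADE_ENVELOPE_SIZE) / AUDIO_FADE_ENVELOPE_SIZE
mask = np.repeat(premask[:, np.newaxis], 2, axis=1)

chunk[:AUDIO_FADE_ENVELOPE_SIZE] *= mask       # fade in
chunk[-AUDIO_FADE_ENVELOPE_SIZE:] *= 1 - mask  # fade out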
@@ -5595,8 +5600,11 @@ def run(self):
          self.copyFrame(lastExistingFrame, outputFrame)
      # Note which sample the original audio output has reached; that is the start point of the next write
      outputPointer = endPointer
-     wavfile.write(self.TEMP_FOLDER + '/audioNew_' + '%06d' % i + '.wav', SAMPLE_RATE, outputAudioData)
-     concat.write("file " + "audioNew_" + "%06d" % i + ".wav\n")
+     print(len(outputAudioData) / 44100)
+     if len(outputAudioData) >= 44100 * 60 * 10 or i == chunksNumber:
+         wavfile.write(self.TEMP_FOLDER + '/audioNew_' + '%06d' % i + '.wav', SAMPLE_RATE, outputAudioData)
+         concat.write("file " + "audioNew_" + "%06d" % i + ".wav\n")
+         outputAudioData = np.zeros((0, audioData.shape[1]))
  concat.close()

  self.print(self.tr('\n\n现在开始合并音频片段\n\n\n'))
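Taken together, the change amounts to the buffering pattern sketched below — a standalone illustration under assumed names (write_segments, temp_folder, and the stereo buffer shape are hypothetical; only the ten-minute threshold and file-naming scheme come from the diff):

import numpy as np
from scipy.io import wavfile

SAMPLE_RATE = 44100
FLUSH_THRESHOLD = SAMPLE_RATE * 60 * 10   # ten minutes of audio, as in the diff

def write_segments(chunks, temp_folder):
    # Accumulate processed audio in memory and flush a numbered wav segment
    # whenever the buffer exceeds ten minutes, or when the last chunk arrives,
    # so peak memory stays bounded instead of growing with the whole file.
    outputAudioData = np.zeros((0, 2))
    with open(temp_folder + "/concat.txt", "a") as concat:
        for i, audioChunk in enumerate(chunks, start=1):
            outputAudioData = np.concatenate((outputAudioData, audioChunk))
            if len(outputAudioData) >= FLUSH_THRESHOLD or i == len(chunks):
                name = 'audioNew_%06d.wav' % i
                wavfile.write(temp_folder + '/' + name, SAMPLE_RATE, outputAudioData)
                concat.write("file " + name + "\n")
                outputAudioData = np.zeros((0, 2))  # drop the flushed samples

The "file ..." lines written to concat.txt match ffmpeg's concat-demuxer list format, so the segments can presumably be merged afterwards with something like: ffmpeg -f concat -safe 0 -i concat.txt -c copy audioNew.wav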
