Encode different episodes simultaneously
This results in less nice log output, but if it’s faster then who cares?
This commit is contained in:
parent ce403a97a3
commit 4a6813d33a
encode.py | 24 ++++++++----------------
--- a/encode.py
+++ b/encode.py
@@ -1,7 +1,8 @@
 #!/usr/bin/env python3
 import base64
+import concurrent.futures
+import multiprocessing
 import os
-import threading
 import xml.etree.ElementTree as ET
 from datetime import datetime
 from subprocess import run
@@ -77,9 +78,9 @@ def encode_episode(podcast, episode, format):
 
         audio.save()
 
-        print(f" {format}", end="", flush=True)
+        print(f"[✔️] {episode['file_base']}.{format}")
     else:
-        print(f" ({format})", end="", flush=True)
+        print(f"[⏭️] {episode['file_base']}.{format}")
 
 
 os.makedirs("static/episodes", exist_ok=True)
@@ -95,6 +96,8 @@ podcast = {
     "poster": "static" + urlparse(channel.find("image").find("url").text).path,
 }
 
+pool = concurrent.futures.ThreadPoolExecutor(max_workers=multiprocessing.cpu_count())
+
 for item in channel.findall("item"):
     episode = {
         "title": item.find("title").text,
@@ -114,18 +117,7 @@ for item in channel.findall("item"):
         )[0],
     }
 
-    print(episode["file_base"], end="", flush=True)
-
-    threads = []
-
     for format in common.FORMATS.items():
-        thread = threading.Thread(
-            target=encode_episode, args=(podcast, episode, format), daemon=True
-        )
-        thread.start()
-        threads.append(thread)
+        pool.submit(encode_episode, podcast, episode, format)
 
-    for thread in threads:
-        thread.join()
-
-    print()
+pool.shutdown(wait=True)
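Taken out of the diff context, the pattern this commit switches to is a single shared ThreadPoolExecutor sized to the CPU count, with one task submitted per (episode, format) pair and a blocking shutdown at the end. The sketch below isolates that pattern; the FORMATS dict, the stub encode_episode, and the sample episode data are hypothetical stand-ins for the script's real common.FORMATS, encoder, and feed parsing.

import concurrent.futures
import multiprocessing

# Hypothetical stand-ins for the script's real data; only the pool usage
# mirrors the diff above.
FORMATS = {"mp3": "libmp3lame", "ogg": "libvorbis"}
podcast = {"title": "Example Podcast"}
episodes = [{"file_base": "episode-001"}, {"file_base": "episode-002"}]

def encode_episode(podcast, episode, format):
    # Stub for the real encoder, which shells out to ffmpeg and tags the file.
    name, codec = format
    print(f"[✔️] {episode['file_base']}.{name}")

# One pool shared by every episode, sized to the number of CPU cores.
pool = concurrent.futures.ThreadPoolExecutor(max_workers=multiprocessing.cpu_count())

for episode in episodes:
    for format in FORMATS.items():
        # Each (episode, format) pair becomes an independent task in the queue.
        pool.submit(encode_episode, podcast, episode, format)

# Block until every queued encode has finished before the script exits.
pool.shutdown(wait=True)

Threads (rather than processes) are enough here because the heavy lifting happens in ffmpeg child processes; the Python threads mostly wait on subprocess calls, which do not hold the GIL. With tasks finishing out of order, the old incremental per-format progress output would interleave, which is presumably why each task now prints one complete line per finished or skipped file instead.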