import logging
import os
import re
import string
import sys
import time

import requests
from bs4 import BeautifulSoup as bso
from mutagen.flac import FLAC
from mutagen.mp3 import EasyMP3
from pathvalidate import sanitize_filename

import qobuz_dl.spoofbuz as spoofbuz
from qobuz_dl import downloader, qopy
from qobuz_dl.color import CYAN, OFF, RED, YELLOW, DF, RESET
from qobuz_dl.exceptions import NonStreamable
from qobuz_dl.db import create_db, handle_download_id

# Base web-player URL used to build shareable item links in search results.
WEB_URL = "https://play.qobuz.com/"

# CSS selectors for scraping artist/title cells from last.fm playlist pages.
ARTISTS_SELECTOR = "td.chartlist-artist > a"
TITLE_SELECTOR = "td.chartlist-name > a"

# Audio file extensions considered when building .m3u playlists.
EXTENSIONS = (".mp3", ".flac")

# Qobuz quality IDs mapped to human-readable labels (5=MP3 320, 6=CD FLAC,
# 7=Hi-Res up to 96kHz, 27=Hi-Res above 96kHz).
QUALITIES = {5: "5 - MP3", 6: "6 - FLAC",
             7: "7 - 24B<96kHz", 27: "27 - 24B>96kHz"}

logger = logging.getLogger(__name__)
class PartialFormatter(string.Formatter):
    """A lenient ``string.Formatter`` that never raises on bad fields.

    Missing fields and falsy values render as *missing* ("n/a" by
    default); values that reject their format spec render as *bad_fmt*.
    Used to format search-result lines from incomplete API metadata.
    """

    def __init__(self, missing="n/a", bad_fmt="n/a"):
        # Placeholder for absent/falsy values and for format-spec errors.
        self.missing = missing
        self.bad_fmt = bad_fmt

    def get_field(self, field_name, args, kwargs):
        """Resolve a field, substituting ``None`` when it is absent."""
        try:
            return super().get_field(field_name, args, kwargs)
        except (KeyError, AttributeError):
            # Signal the miss; format_field() turns None into `missing`.
            return None, field_name

    def format_field(self, value, spec):
        """Format *value*, falling back to the placeholders on failure."""
        if not value:
            # Note: intentionally treats falsy values (0, "") as missing.
            return self.missing
        try:
            return super().format_field(value, spec)
        except ValueError:
            if self.bad_fmt:
                return self.bad_fmt
            raise
class QobuzDL:
    """High-level downloader front end for the Qobuz streaming service.

    Resolves Qobuz (and last.fm playlist) URLs, searches the catalogue,
    and drives :mod:`qobuz_dl.downloader` to fetch releases, optionally
    tracking completed IDs in a local database.
    """

    def __init__(
        self,
        directory="Qobuz Downloads",
        quality=6,
        embed_art=False,
        lucky_limit=1,
        lucky_type="album",
        interactive_limit=20,
        ignore_singles_eps=False,
        no_m3u_for_playlists=False,
        quality_fallback=True,
        cover_og_quality=False,
        no_cover=False,
        downloads_db=None,
    ):
        # Root download directory (created on demand).
        self.directory = self.create_dir(directory)
        # Quality ID; see module-level QUALITIES for valid values.
        self.quality = quality
        self.embed_art = embed_art
        self.lucky_limit = lucky_limit
        self.lucky_type = lucky_type
        self.interactive_limit = interactive_limit
        self.ignore_singles_eps = ignore_singles_eps
        self.no_m3u_for_playlists = no_m3u_for_playlists
        self.quality_fallback = quality_fallback
        self.cover_og_quality = cover_og_quality
        self.no_cover = no_cover
        # Optional sqlite DB used to skip already-downloaded release IDs.
        self.downloads_db = create_db(downloads_db) if downloads_db else None

    def initialize_client(self, email, pwd, app_id, secrets):
        """Create the authenticated Qobuz API client and log the quality."""
        self.client = qopy.Client(email, pwd, app_id, secrets)
        logger.info(f"{YELLOW}Set quality: {QUALITIES[int(self.quality)]}\n")

    def get_tokens(self):
        """Scrape the app ID and secrets needed to talk to the Qobuz API."""
        spoofer = spoofbuz.Spoofer()
        self.app_id = spoofer.getAppId()
        self.secrets = [
            secret for secret in spoofer.getSecrets().values() if secret
        ]  # avoid empty fields

    def create_dir(self, directory=None):
        """Normalize *directory*, create it if needed, and return the path."""
        fix = os.path.normpath(directory)
        os.makedirs(fix, exist_ok=True)
        return fix

    def get_id(self, url):
        """Extract the item ID from a play/open/store Qobuz URL.

        Raises AttributeError (via ``.group``) when *url* doesn't match;
        callers catch this indirectly through handle_url's error path.
        """
        return re.match(
            r"https?://(?:w{0,3}|play|open)\.qobuz\.com/(?:(?:album|track"
            r"|artist|playlist|label)/|[a-z]{2}-[a-z]{2}/album/-?\w+(?:-\w+)*"
            r"-?/|user/library/favorites/)(\w+)",
            url,
        ).group(1)

    def get_type(self, url):
        """Return the item type (album/track/artist/playlist/label) of *url*."""
        if re.match(r'https?', url) is not None:
            url_type = url.split('/')[3]
            if url_type not in ['album', 'artist', 'playlist',
                                'track', 'label']:
                if url_type == "user":
                    url_type = url.split('/')[-1]
                else:
                    # url is from Qobuz store
                    # e.g. "https://www.qobuz.com/us-en/album/..."
                    url_type = url.split('/')[4]
        else:
            # url missing base
            # e.g. "/us-en/album/{artist}/{id}"
            url_type = url.split('/')[2]
        return url_type

    def download_from_id(self, item_id, album=True, alt_path=None):
        """Download one release/track by ID, honoring the downloads DB.

        :param item_id: Qobuz item ID.
        :param album: True for album downloads, False for single tracks.
        :param alt_path: optional directory overriding ``self.directory``.
        """
        if handle_download_id(self.downloads_db, item_id, add_id=False):
            logger.info(
                f"{OFF}This release ID ({item_id}) was already downloaded "
                "according to the local database.\nUse the '--no-db' flag "
                "to bypass this."
            )
            return
        try:
            downloader.download_id_by_type(
                self.client,
                item_id,
                alt_path or self.directory,
                str(self.quality),
                album,
                self.embed_art,
                self.ignore_singles_eps,
                self.quality_fallback,
                self.cover_og_quality,
                self.no_cover,
            )
            # Record the ID only after a successful download.
            handle_download_id(self.downloads_db, item_id, add_id=True)
        except (requests.exceptions.RequestException, NonStreamable) as e:
            logger.error(f"{RED}Error getting release: {e}. Skipping...")

    def handle_url(self, url):
        """Dispatch a Qobuz URL to the right download routine by type."""
        # Collection types carry a metadata fetcher and the key under which
        # their downloadable children are listed; leaf types download directly.
        possibles = {
            "playlist": {
                "func": self.client.get_plist_meta,
                "iterable_key": "tracks",
            },
            "artist": {
                "func": self.client.get_artist_meta,
                "iterable_key": "albums",
            },
            "label": {
                "func": self.client.get_label_meta,
                "iterable_key": "albums",
            },
            "album": {"album": True, "func": None, "iterable_key": None},
            "track": {"album": False, "func": None, "iterable_key": None},
        }
        try:
            url_type = self.get_type(url)
            type_dict = possibles[url_type]
            item_id = self.get_id(url)
        except (KeyError, IndexError):
            logger.info(
                f'{RED}Invalid url: "{url}". Use urls from '
                'https://play.qobuz.com!'
            )
            return
        if type_dict["func"]:
            # Collection: fetch paged metadata, then download each child.
            content = [item for item in type_dict["func"](item_id)]
            content_name = content[0]["name"]
            logger.info(
                f"{YELLOW}Downloading all the music from {content_name} "
                f"({url_type})!"
            )
            new_path = self.create_dir(
                os.path.join(self.directory, sanitize_filename(content_name))
            )
            items = [item[type_dict["iterable_key"]]["items"]
                     for item in content][0]
            logger.info(f"{YELLOW}{len(items)} downloads in queue")
            for item in items:
                self.download_from_id(
                    item["id"],
                    True if type_dict["iterable_key"] == "albums" else False,
                    new_path,
                )
            if url_type == "playlist":
                self.make_m3u(new_path)
        else:
            self.download_from_id(item_id, type_dict["album"])

    def download_list_of_urls(self, urls):
        """Download every entry in *urls* (Qobuz URL, last.fm URL, or txt file)."""
        if not urls or not isinstance(urls, list):
            logger.info(f"{OFF}Nothing to download")
            return
        for url in urls:
            if "last.fm" in url:
                self.download_lastfm_pl(url)
            elif os.path.isfile(url):
                self.download_from_txt_file(url)
            else:
                self.handle_url(url)

    def download_from_txt_file(self, txt_file):
        """Read URLs (one per line, '#' comments allowed) and download them."""
        with open(txt_file, "r") as txt:
            try:
                urls = [
                    line.replace("\n", "")
                    for line in txt.readlines()
                    if not line.strip().startswith("#")
                ]
            except Exception as e:
                logger.error(f"{RED}Invalid text file: {e}")
                return
            logger.info(
                f"{YELLOW}qobuz-dl will download {len(urls)}"
                f" urls from file: {txt_file}"
            )
            self.download_list_of_urls(urls)

    def lucky_mode(self, query, download=True):
        """Search and (optionally) download the first N results for *query*.

        :returns: the list of result URLs, or None for an invalid query.
        """
        if len(query) < 3:
            logger.info(f"{RED}Your search query is too short or invalid")
            return

        logger.info(
            f'{YELLOW}Searching {self.lucky_type}s for "{query}".\n'
            f"{YELLOW}qobuz-dl will attempt to download the first "
            f"{self.lucky_limit} results."
        )
        results = self.search_by_type(query, self.lucky_type,
                                      self.lucky_limit, True)

        if download:
            self.download_list_of_urls(results)

        return results

    def format_duration(self, duration):
        """Render a duration in seconds as HH:MM:SS."""
        return time.strftime("%H:%M:%S", time.gmtime(duration))

    def search_by_type(self, query, item_type, limit=10, lucky=False):
        """Search Qobuz for *item_type* matching *query*.

        :returns: a list of result URLs when *lucky* is True, otherwise a
            list of ``{"text": ..., "url": ...}`` dicts; None on bad input.
        """
        if len(query) < 3:
            # FIX: this message previously lacked the f-prefix, printing a
            # literal "{RED}" instead of the ANSI color code.
            logger.info(f"{RED}Your search query is too short or invalid")
            return

        possibles = {
            "album": {
                "func": self.client.search_albums,
                "album": True,
                "key": "albums",
                "format": "{artist[name]} - {title}",
                "requires_extra": True,
            },
            "artist": {
                "func": self.client.search_artists,
                "album": True,
                "key": "artists",
                "format": "{name} - ({albums_count} releases)",
                "requires_extra": False,
            },
            "track": {
                "func": self.client.search_tracks,
                "album": False,
                "key": "tracks",
                "format": "{performer[name]} - {title}",
                "requires_extra": True,
            },
            "playlist": {
                "func": self.client.search_playlists,
                "album": False,
                "key": "playlists",
                "format": "{name} - ({tracks_count} releases)",
                "requires_extra": False,
            },
        }

        try:
            mode_dict = possibles[item_type]
            results = mode_dict["func"](query, limit)
            iterable = results[mode_dict["key"]]["items"]
            item_list = []
            for i in iterable:
                # PartialFormatter tolerates missing metadata fields.
                fmt = PartialFormatter()
                text = fmt.format(mode_dict["format"], **i)
                if mode_dict["requires_extra"]:
                    text = "{} - {} [{}]".format(
                        text,
                        self.format_duration(i["duration"]),
                        "HI-RES" if i["hires_streamable"] else "LOSSLESS",
                    )

                url = "{}{}/{}".format(WEB_URL, item_type, i.get("id", ""))
                item_list.append({"text": text, "url": url} if not lucky
                                 else url)
            return item_list
        except (KeyError, IndexError):
            logger.info(f"{RED}Invalid type: {item_type}")
            return

    def interactive(self, download=True):
        """Run the curses-based interactive search/download session.

        :returns: the list of queued URLs, or None on Ctrl+C.
        """
        try:
            from pick import pick
        except (ImportError, ModuleNotFoundError):
            if os.name == "nt":
                sys.exit(
                    'Please install curses with '
                    '"pip3 install windows-curses" to continue'
                )
            raise

        qualities = [
            {"q_string": "320", "q": 5},
            {"q_string": "Lossless", "q": 6},
            {"q_string": "Hi-res =< 96kHz", "q": 7},
            {"q_string": "Hi-Res > 96 kHz", "q": 27},
        ]

        def get_title_text(option):
            # Label shown by pick() for a search-result option.
            return option.get("text")

        def get_quality_text(option):
            # Label shown by pick() for a quality option.
            return option.get("q_string")

        try:
            item_types = ["Albums", "Tracks", "Artists", "Playlists"]
            # Strip the trailing "s" to get the API type name.
            selected_type = pick(item_types,
                                 "I'll search for:\n[press Intro]"
                                 )[0][:-1].lower()
            logger.info(f"{YELLOW}Ok, we'll search for "
                        f"{selected_type}s{RESET}")
            final_url_list = []
            while True:
                query = input(f"{CYAN}Enter your search: [Ctrl + c to quit]\n"
                              f"-{DF} ")
                logger.info(f"{YELLOW}Searching...{RESET}")
                options = self.search_by_type(
                    query, selected_type, self.interactive_limit
                )
                if not options:
                    logger.info(f"{OFF}Nothing found{RESET}")
                    continue
                title = (
                    f'*** RESULTS FOR "{query.title()}" ***\n\n'
                    "Select [space] the item(s) you want to download "
                    "(one or more)\nPress Ctrl + c to quit\n"
                    "Don't select anything to try another search"
                )
                selected_items = pick(
                    options,
                    title,
                    multiselect=True,
                    min_selection_count=0,
                    options_map_func=get_title_text,
                )
                if len(selected_items) > 0:
                    [final_url_list.append(i[0]["url"])
                     for i in selected_items]
                    y_n = pick(
                        ["Yes", "No"],
                        "Items were added to queue to be downloaded. "
                        "Keep searching?",
                    )
                    if y_n[0][0] == "N":
                        break
                else:
                    logger.info(f"{YELLOW}Ok, try again...{RESET}")
                    continue
            if final_url_list:
                desc = (
                    "Select [intro] the quality (the quality will "
                    "be automatically\ndowngraded if the selected "
                    "is not found)"
                )
                self.quality = pick(
                    qualities,
                    desc,
                    default_index=1,
                    options_map_func=get_quality_text,
                )[0]["q"]

                if download:
                    self.download_list_of_urls(final_url_list)

                return final_url_list
        except KeyboardInterrupt:
            logger.info(f"{YELLOW}Bye")
            return

    def download_lastfm_pl(self, playlist_url):
        """Scrape a last.fm playlist page and download its tracks from Qobuz."""
        # Apparently, last fm API doesn't have a playlist endpoint. If you
        # find out that it has, please fix this!
        try:
            r = requests.get(playlist_url, timeout=10)
        except requests.exceptions.RequestException as e:
            logger.error(f"{RED}Playlist download failed: {e}")
            return
        soup = bso(r.content, "html.parser")
        artists = [artist.text for artist in soup.select(ARTISTS_SELECTOR)]
        titles = [title.text for title in soup.select(TITLE_SELECTOR)]

        track_list = []
        if len(artists) == len(titles) and artists:
            track_list = [
                artist + " " + title for artist, title in zip(artists, titles)
            ]

        if not track_list:
            logger.info(f"{OFF}Nothing found")
            return

        pl_title = sanitize_filename(soup.select_one("h1").text)
        pl_directory = os.path.join(self.directory, pl_title)
        logger.info(
            f"{YELLOW}Downloading playlist: {pl_title} "
            f"({len(track_list)} tracks)"
        )

        for i in track_list:
            # FIX: guard against empty search results; indexing [0]
            # unconditionally crashed with IndexError when a last.fm
            # track had no Qobuz match.
            found = self.search_by_type(i, "track", 1, lucky=True)
            if not found:
                logger.info(f"{OFF}Nothing found")
                continue
            track_id = self.get_id(found[0])
            if track_id:
                self.download_from_id(track_id, False, pl_directory)

        self.make_m3u(pl_directory)

    def make_m3u(self, pl_directory):
        """Write an .m3u playlist of the audio files under *pl_directory*."""
        if self.no_m3u_for_playlists:
            return

        track_list = ["#EXTM3U"]
        rel_folder = os.path.basename(os.path.normpath(pl_directory))
        pl_name = rel_folder + ".m3u"
        for local, dirs, files in os.walk(pl_directory):
            dirs.sort()
            # Paths relative to the playlist folder, for portable m3u entries.
            audio_rel_files = [
                os.path.join(os.path.basename(os.path.normpath(local)), file_)
                for file_ in files
                if os.path.splitext(file_)[-1] in EXTENSIONS
            ]
            audio_files = [
                os.path.abspath(os.path.join(local, file_))
                for file_ in files
                if os.path.splitext(file_)[-1] in EXTENSIONS
            ]
            if not audio_files or len(audio_files) != len(audio_rel_files):
                continue

            for audio_rel_file, audio_file in zip(audio_rel_files,
                                                  audio_files):
                try:
                    pl_item = (
                        EasyMP3(audio_file)
                        if ".mp3" in audio_file
                        else FLAC(audio_file)
                    )
                    title = pl_item["TITLE"][0]
                    artist = pl_item["ARTIST"][0]
                    length = int(pl_item.info.length)
                    index = "#EXTINF:{}, {} - {}\n{}".format(
                        length, artist, title, audio_rel_file
                    )
                except:  # noqa
                    # Best-effort: skip files with unreadable/missing tags.
                    continue
                track_list.append(index)

        # Only the "#EXTM3U" header means nothing was found; skip writing.
        if len(track_list) > 1:
            with open(os.path.join(pl_directory, pl_name), "w") as pl:
                pl.write("\n\n".join(track_list))