Merge branch 'development' into shutdown-restart-refactor

morpheus65535 2024-02-26 17:00:35 -05:00 committed by GitHub
commit a20485c426
36 changed files with 1031 additions and 193 deletions

View File

@ -2,7 +2,7 @@
## Tools required
- Python 3.7.x to 3.11.x (3.9.x is highly recommended and 3.12 or greater is proscribed).
- Python 3.8.x to 3.11.x (3.10.x is highly recommended and 3.12 or greater is proscribed).
- PyCharm or Visual Studio Code IDEs are recommended but if you're happy with VIM, enjoy it!
- Git.
- UI testing must be done using the latest version of Chrome.
@ -42,4 +42,4 @@ As we're using Git in the development process, you better disable automatic upda
- All commits must have a meaningful commit message (ex.: Fixed issue with this, Improved process abc, Added input field to UI, etc.).
- Fixes can be made directly to the `development` branch, but keep in mind that a pre-release with beta versioning will be created every day a new push is made.
- Features must be developed in a dedicated feature branch and merged back to the `development` branch using a PR.
- Once reviewed, your PR will be merged using Squash and Merge with a meaningful message.
- Once reviewed, your PR will be merged by morpheus65535 using Squash and Merge with a meaningful message.

View File

@ -4,6 +4,7 @@ import hashlib
import os
import ast
import logging
import re
from urllib.parse import quote_plus
from literals import EXIT_VALIDATION_ERROR
@ -124,6 +125,12 @@ validators = [
Validator('general.skip_hashing', must_exist=True, default=False, is_type_of=bool),
Validator('general.language_equals', must_exist=True, default=[], is_type_of=list),
# log section
Validator('log.include_filter', must_exist=True, default='', is_type_of=str, cast=str),
Validator('log.exclude_filter', must_exist=True, default='', is_type_of=str, cast=str),
Validator('log.ignore_case', must_exist=True, default=False, is_type_of=bool),
Validator('log.use_regex', must_exist=True, default=False, is_type_of=bool),
# auth section
Validator('auth.apikey', must_exist=True, default=hexlify(os.urandom(16)).decode(), is_type_of=str),
Validator('auth.type', must_exist=True, default=None, is_type_of=(NoneType, str),
@ -217,6 +224,14 @@ validators = [
Validator('addic7ed.user_agent', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.vip', must_exist=True, default=False, is_type_of=bool),
# avistaz section
Validator('avistaz.cookies', must_exist=True, default='', is_type_of=str),
Validator('avistaz.user_agent', must_exist=True, default='', is_type_of=str),
# cinemaz section
Validator('cinemaz.cookies', must_exist=True, default='', is_type_of=str),
Validator('cinemaz.user_agent', must_exist=True, default='', is_type_of=str),
# podnapisi section
Validator('podnapisi.verify_ssl', must_exist=True, default=True, is_type_of=bool),
@ -285,7 +300,8 @@ validators = [
Validator('embeddedsubtitles.included_codecs', must_exist=True, default=[], is_type_of=list),
Validator('embeddedsubtitles.hi_fallback', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.timeout', must_exist=True, default=600, is_type_of=int, gte=1),
Validator('embeddedsubtitles.unknown_as_english', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.unknown_as_fallback', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.fallback_lang', must_exist=True, default='en', is_type_of=str, cast=str),
# karagarga section
Validator('karagarga.username', must_exist=True, default='', is_type_of=str, cast=str),
@ -424,7 +440,7 @@ array_keys = ['excluded_tags',
empty_values = ['', 'None', 'null', 'undefined', None, []]
str_keys = ['chmod']
str_keys = ['chmod', 'log_include_filter', 'log_exclude_filter']
# Increase Sonarr and Radarr sync interval since we now use SignalR feed to update in real time
if settings.sonarr.series_sync < 15:
@ -443,6 +459,12 @@ if settings.general.wanted_search_frequency == 3:
if settings.general.wanted_search_frequency_movie == 3:
settings.general.wanted_search_frequency_movie = 6
# backward compatibility embeddedsubtitles provider
if hasattr(settings.embeddedsubtitles, 'unknown_as_english'):
if settings.embeddedsubtitles.unknown_as_english:
settings.embeddedsubtitles.unknown_as_fallback = True
settings.embeddedsubtitles.fallback_lang = 'en'
del settings.embeddedsubtitles.unknown_as_english
# save updated settings to file
write_config()
@ -465,6 +487,24 @@ def get_settings():
settings_to_return[k].update({subk: subv})
return settings_to_return
def validate_log_regex():
# handle bug in dynaconf that changes strings to numbers, so change them back to str
if not isinstance(settings.log.include_filter, str):
settings.log.include_filter = str(settings.log.include_filter)
if not isinstance(settings.log.exclude_filter, str):
settings.log.exclude_filter = str(settings.log.exclude_filter)
if (settings.log.use_regex):
# compile any regular expressions specified to see if they are valid
# if invalid, tell the user which one
try:
re.compile(settings.log.include_filter)
except:
raise ValidationError(f"Include filter: invalid regular expression: {settings.log.include_filter}")
try:
re.compile(settings.log.exclude_filter)
except:
raise ValidationError(f"Exclude filter: invalid regular expression: {settings.log.exclude_filter}")
def save_settings(settings_items):
configure_debug = False
@ -482,7 +522,8 @@ def save_settings(settings_items):
undefined_subtitles_track_default_changed = False
audio_tracks_parsing_changed = False
reset_providers = False
check_log_regex = False
# Subzero Mods
update_subzero = False
subzero_mods = get_array_from(settings.general.subzero_mods)
@ -618,12 +659,10 @@ def save_settings(settings_items):
if key != settings.opensubtitlescom.username:
reset_providers = True
region.delete('oscom_token')
region.delete('oscom_server')
elif key == 'settings-opensubtitlescom-password':
if key != settings.opensubtitlescom.password:
reset_providers = True
region.delete('oscom_token')
region.delete('oscom_server')
if key == 'settings-subscene-username':
if key != settings.subscene.username:
@ -706,6 +745,7 @@ def save_settings(settings_items):
try:
settings.validators.validate()
validate_log_regex()
except ValidationError:
settings.reload()
raise

View File

@ -229,6 +229,14 @@ def get_providers_auth():
'user_agent': settings.addic7ed.user_agent,
'is_vip': settings.addic7ed.vip,
},
'avistaz': {
'cookies': settings.avistaz.cookies,
'user_agent': settings.avistaz.user_agent,
},
'cinemaz': {
'cookies': settings.cinemaz.cookies,
'user_agent': settings.cinemaz.user_agent,
},
'opensubtitles': {
'username': settings.opensubtitles.username,
'password': settings.opensubtitles.password,
@ -293,7 +301,8 @@ def get_providers_auth():
'ffprobe_path': _FFPROBE_BINARY,
'ffmpeg_path': _FFMPEG_BINARY,
'timeout': settings.embeddedsubtitles.timeout,
'unknown_as_english': settings.embeddedsubtitles.unknown_as_english,
'unknown_as_fallback': settings.embeddedsubtitles.unknown_as_fallback,
'fallback_lang': settings.embeddedsubtitles.fallback_lang,
},
'karagarga': {
'username': settings.karagarga.username,

View File

@ -92,10 +92,10 @@ def configure_logging(debug=False):
warnings.simplefilter('ignore', category=PytzUsageWarning)
# warnings.simplefilter('ignore', category=SAWarning)
if not debug:
log_level = "INFO"
if debug:
log_level = logging.DEBUG
else:
log_level = "DEBUG"
log_level = logging.INFO
logger.handlers = []
@ -107,7 +107,7 @@ def configure_logging(debug=False):
'%(asctime)-15s - %(name)-32s (%(thread)x) : %(levelname)s (%(module)s:%(lineno)d) - %(message)s')
ch.setFormatter(cf)
ch.setLevel(log_level)
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
# File Logging
@ -121,7 +121,7 @@ def configure_logging(debug=False):
f = FileHandlerFormatter('%(asctime)s|%(levelname)-8s|%(name)-32s|%(message)s|',
'%Y-%m-%d %H:%M:%S')
fh.setFormatter(f)
fh.setLevel(log_level)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
if debug:

View File

@ -301,17 +301,11 @@ class Scheduler:
name='Search for Missing Movies Subtitles', replace_existing=True)
def __upgrade_subtitles_task(self):
if settings.general.upgrade_subs and \
(settings.general.use_sonarr or settings.general.use_radarr):
if settings.general.use_sonarr or settings.general.use_radarr:
self.aps_scheduler.add_job(
upgrade_subtitles, IntervalTrigger(hours=int(settings.general.upgrade_frequency)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='upgrade_subtitles',
name='Upgrade Previously Downloaded Subtitles', replace_existing=True)
else:
try:
self.aps_scheduler.remove_job(job_id='upgrade_subtitles')
except JobLookupError:
pass
def __randomize_interval_task(self):
for job in self.aps_scheduler.get_jobs():

View File

@ -93,3 +93,32 @@ def get_movies_from_radarr_api(apikey_radarr, radarr_id=None):
return r.json()
else:
return
def get_history_from_radarr_api(apikey_radarr, movie_id):
url_radarr_api_history = f"{url_api_radarr()}history?eventType=1&movieIds={movie_id}&apikey={apikey_radarr}"
try:
r = requests.get(url_radarr_api_history, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get history from Radarr. Http error.")
return
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get history from Radarr. Connection Error.")
return
except requests.exceptions.Timeout:
logging.exception("BAZARR Error trying to get history from Radarr. Timeout Error.")
return
except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get history from Radarr.")
return
except Exception as e:
logging.exception(f"Exception raised while getting history from Radarr API: {e}")
return
else:
if r.status_code == 200:
return r.json()
else:
return

View File

@ -166,3 +166,32 @@ def get_episodesFiles_from_sonarr_api(apikey_sonarr, series_id=None, episode_fil
return r.json()
else:
return
def get_history_from_sonarr_api(apikey_sonarr, episode_id):
url_sonarr_api_history = f"{url_api_sonarr()}history?eventType=1&episodeId={episode_id}&apikey={apikey_sonarr}"
try:
r = requests.get(url_sonarr_api_history, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get history from Sonarr. Http error.")
return
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get history from Sonarr. Connection Error.")
return
except requests.exceptions.Timeout:
logging.exception("BAZARR Error trying to get history from Sonarr. Timeout Error.")
return
except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get history from Sonarr.")
return
except Exception as e:
logging.exception(f"Exception raised while getting history from Sonarr API: {e}")
return
else:
if r.status_code == 200:
return r.json()
else:
return

View File

@ -2,5 +2,10 @@
from .ffprobe import refine_from_ffprobe
from .database import refine_from_db
from .arr_history import refine_from_arr_history
registered = {"database": refine_from_db, "ffprobe": refine_from_ffprobe}
registered = {
"database": refine_from_db,
"ffprobe": refine_from_ffprobe,
"arr_history": refine_from_arr_history,
}

View File

@ -0,0 +1,32 @@
# coding=utf-8
# fmt: off
import logging
from app.config import settings
from radarr.sync.utils import get_history_from_radarr_api
from sonarr.sync.utils import get_history_from_sonarr_api
from subliminal import Episode, Movie
refined_providers = {'avistaz', 'cinemaz'}
def refine_from_arr_history(path, video):
if refined_providers.intersection(settings.general.enabled_providers) and video.info_url is None:
refine_info_url(video)
def refine_info_url(video):
if isinstance(video, Episode) and video.sonarrEpisodeId:
history = get_history_from_sonarr_api(settings.sonarr.apikey, video.sonarrEpisodeId)
elif isinstance(video, Movie) and video.radarrId:
history = get_history_from_radarr_api(settings.radarr.apikey, video.radarrId)
else:
return
for grab in history['records']:
# take the latest grab for the episode
if 'nzbInfoUrl' in grab['data'] and grab['data']['nzbInfoUrl']:
video.info_url = grab['data']['nzbInfoUrl']
logging.debug(f'Refining {video} with Info URL: {video.info_url}')
break
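The refiner above only reads 'records' and 'data.nzbInfoUrl' from the Sonarr/Radarr history payload; the surrounding structure below is an assumption for illustration only:

example_history = {
    "records": [
        # newest grab first; the loop above keeps the first non-empty nzbInfoUrl
        {"data": {"nzbInfoUrl": "https://avistaz.to/torrent/12345-example-release"}},
        {"data": {}},  # older grabs without an info URL are skipped
    ]
}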

View File

@ -35,7 +35,13 @@ class SubSyncer:
no_fix_framerate=settings.subsync.no_fix_framerate, gss=settings.subsync.gss):
self.reference = video_path
self.srtin = srt_path
self.srtout = f'{os.path.splitext(self.srtin)[0]}.synced.srt'
if self.srtin.casefold().endswith('.ass'):
# try to preserve original subtitle style
# ffmpeg will be able to handle this automatically as long as it has the libass filter
extension = '.ass'
else:
extension = '.srt'
self.srtout = f'{os.path.splitext(self.srtin)[0]}.synced{extension}'
self.args = None
ffprobe_exe = get_binary('ffprobe')
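A quick worked example of the new output naming, using only the standard library (the path is made up):

import os
srtin = "/media/Show S01E01.en.ass"
extension = ".ass" if srtin.casefold().endswith(".ass") else ".srt"
print(f"{os.path.splitext(srtin)[0]}.synced{extension}")
# -> /media/Show S01E01.en.synced.ass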

View File

@ -211,6 +211,7 @@ function ManualSearchView<T extends SupportType>(props: Props<T>) {
</Alert>
<Collapse in={haveResult && !results.isFetching}>
<PageTable
autoScroll={false}
tableStyles={{ emptyText: "No result", placeholder: 10 }}
columns={columns}
data={results.data ?? []}

View File

@ -1,6 +1,6 @@
import { useIsLoading } from "@/contexts";
import { Group, Pagination, Text } from "@mantine/core";
import { FunctionComponent } from "react";
import { FunctionComponent, useEffect } from "react";
interface Props {
count: number;
index: number;
@ -22,6 +22,11 @@ const PageControl: FunctionComponent<Props> = ({
const isLoading = useIsLoading();
// Jump to first page if total page count changes
useEffect(() => {
goto(0);
}, [total, goto]);
return (
<Group p={16} position="apart">
<Text size="sm">

View File

@ -1,4 +1,5 @@
import { ScrollToTop } from "@/utilities";
import { usePageSize } from "@/utilities/storage";
import { useEffect } from "react";
import { usePagination, useTable } from "react-table";
import BaseTable from "./BaseTable";
@ -22,6 +23,9 @@ export default function PageTable<T extends object>(props: Props<T>) {
...(plugins ?? [])
);
// use page size as specified in UI settings
instance.state.pageSize = usePageSize();
if (instanceRef) {
instanceRef.current = instance;
}

View File

@ -27,7 +27,7 @@ class SocketIOClient {
this.socket.on("data", this.onEvent.bind(this));
this.events = [];
this.debounceReduce = debounce(this.reduce, 20);
this.debounceReduce = debounce(this.reduce, 3);
this.reducers = [];
onlineManager.setOnline(false);

View File

@ -18,6 +18,8 @@ import { useLanguageProfileBy } from "@/utilities/languages";
import {
faAdjust,
faBriefcase,
faCircleChevronDown,
faCircleChevronRight,
faCloudUploadAlt,
faHdd,
faSearch,
@ -28,11 +30,23 @@ import { Container, Group, Stack } from "@mantine/core";
import { Dropzone } from "@mantine/dropzone";
import { useDocumentTitle } from "@mantine/hooks";
import { showNotification } from "@mantine/notifications";
import { FunctionComponent, useCallback, useMemo, useRef } from "react";
import {
FunctionComponent,
useCallback,
useMemo,
useRef,
useState,
} from "react";
import { Navigate, useParams } from "react-router-dom";
import Table from "./table";
const SeriesEpisodesView: FunctionComponent = () => {
const [state, setState] = useState({
expand: false,
buttonText: "Expand All",
initial: true,
});
const params = useParams();
const id = Number.parseInt(params.id as string);
@ -94,6 +108,12 @@ const SeriesEpisodesView: FunctionComponent = () => {
return <Navigate to={RouterNames.NotFound}></Navigate>;
}
const toggleState = () => {
state.expand
? setState({ expand: false, buttonText: "Expand All", initial: false })
: setState({ expand: true, buttonText: "Collapse All", initial: false });
};
return (
<Container px={0} fluid>
<QueryOverlay result={seriesQuery}>
@ -189,12 +209,22 @@ const SeriesEpisodesView: FunctionComponent = () => {
>
Edit Series
</Toolbox.Button>
<Toolbox.Button
icon={state.expand ? faCircleChevronRight : faCircleChevronDown}
onClick={() => {
toggleState();
}}
>
{state.buttonText}
</Toolbox.Button>
</Group>
</Toolbox>
<Stack>
<ItemOverview item={series ?? null} details={details}></ItemOverview>
<QueryOverlay result={episodesQuery}>
<Table
expand={state.expand}
initial={state.initial}
episodes={episodes ?? null}
profile={profile}
disabled={hasTask || !series || series.profileId === null}

View File

@ -31,9 +31,17 @@ interface Props {
episodes: Item.Episode[] | null;
disabled?: boolean;
profile?: Language.Profile;
expand?: boolean;
initial?: boolean;
}
const Table: FunctionComponent<Props> = ({ episodes, profile, disabled }) => {
const Table: FunctionComponent<Props> = ({
episodes,
profile,
disabled,
expand,
initial,
}) => {
const onlyDesired = useShowOnlyDesired();
const profileItems = useProfileItemsToLanguages(profile);
@ -212,9 +220,18 @@ const Table: FunctionComponent<Props> = ({ episodes, profile, disabled }) => {
useEffect(() => {
if (instance.current) {
instance.current.toggleRowExpanded([`season:${maxSeason}`], true);
if (initial) {
// expand the last/current season on initial display
instance.current.toggleRowExpanded([`season:${maxSeason}`], true);
// make sure season 0 is collapsed
instance.current.toggleRowExpanded([`season:0`], false);
} else {
if (expand !== undefined) {
instance.current.toggleAllRowsExpanded(expand);
}
}
}
}, [maxSeason]);
}, [maxSeason, expand, initial]);
return (
<GroupTable

View File

@ -177,6 +177,22 @@ const SettingsGeneralView: FunctionComponent = () => {
<Section header="Logging">
<Check label="Debug" settingKey="settings-general-debug"></Check>
<Message>Debug logging should only be enabled temporarily</Message>
<Text
label="Include Filter"
settingKey="settings-log-include_filter"
></Text>
<Text
label="Exclude Filter"
settingKey="settings-log-exclude_filter"
></Text>
<Check
label="Use Regular Expressions (Regex)"
settingKey="settings-log-use_regex"
></Check>
<Check
label="Ignore Case"
settingKey="settings-log-ignore_case"
></Check>
</Section>
<Section header="Backups">
<File

View File

@ -68,6 +68,24 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
name: "Argenteam Dump",
description: "Subtitles dump of the now extinct Argenteam",
},
{
key: "avistaz",
name: "AvistaZ",
description:
"avistaz.to - AvistaZ is an Asian torrent tracker for HD movies, TV shows and music",
inputs: [
{
type: "text",
key: "cookies",
name: "Cookies, e.g., PHPSESSID=abc; wikisubtitlesuser=xyz; wikisubtitlespass=efg",
},
{
type: "text",
key: "user_agent",
name: "User-Agent, e.g., Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:95.0) Gecko/20100101 Firefox/95.0",
},
],
},
{
key: "assrt",
description: "Chinese Subtitles Provider",
@ -96,6 +114,24 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
description:
"Provider removed from Bazarr because it was causing too many issues.\nIt will always return no subtitles.",
},
{
key: "cinemaz",
name: "CinemaZ",
description:
"cinemaz.to - CinemaZ is a private torrent tracker which is dedicated to little-known and cult films that you will not find on other popular torrent resources.",
inputs: [
{
type: "text",
key: "cookies",
name: "Cookies, e.g., PHPSESSID=abc; wikisubtitlesuser=xyz; wikisubtitlespass=efg",
},
{
type: "text",
key: "user_agent",
name: "User-Agent, e.g., Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:95.0) Gecko/20100101 Firefox/95.0",
},
],
},
{
key: "embeddedsubtitles",
name: "Embedded Subtitles",
@ -120,8 +156,14 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
},
{
type: "switch",
key: "unknown_as_english",
name: "Use subtitles with unknown info/language as english",
key: "unknown_as_fallback",
name: "Use subtitles with unknown info/language as fallback language",
},
{
type: "text",
key: "fallback_lang",
name: "Fallback language",
defaultValue: "en",
},
],
message:

View File

@ -0,0 +1,99 @@
import { useSettingsMutation, useSystemSettings } from "@/apis/hooks";
import { LoadingProvider } from "@/contexts";
import { useOnValueChange } from "@/utilities";
import { LOG } from "@/utilities/console";
import {
Button,
Container,
Divider,
Group,
LoadingOverlay,
Space,
} from "@mantine/core";
import { useForm } from "@mantine/form";
import { FunctionComponent, ReactNode, useCallback, useMemo } from "react";
import { FormContext, FormValues, runHooks } from "../utilities/FormValues";
import { SettingsProvider } from "../utilities/SettingsProvider";
interface Props {
children: ReactNode;
callbackModal: (value: boolean) => void;
}
const LayoutModal: FunctionComponent<Props> = (props) => {
const { children, callbackModal } = props;
const { data: settings, isLoading, isRefetching } = useSystemSettings();
const { mutate, isLoading: isMutating } = useSettingsMutation();
const form = useForm<FormValues>({
initialValues: {
settings: {},
hooks: {},
},
});
useOnValueChange(isRefetching, (value) => {
if (!value) {
form.reset();
}
});
const submit = useCallback(
(values: FormValues) => {
const { settings, hooks } = values;
if (Object.keys(settings).length > 0) {
const settingsToSubmit = { ...settings };
runHooks(hooks, settingsToSubmit);
LOG("info", "submitting settings", settingsToSubmit);
mutate(settingsToSubmit);
// wait for settings to be validated before callback
// let the user see the spinning indicator on the Save button before the modal closes
setTimeout(() => {
callbackModal(true);
}, 500);
}
},
[mutate, callbackModal]
);
const totalStagedCount = useMemo(() => {
return Object.keys(form.values.settings).length;
}, [form.values.settings]);
return (
<SettingsProvider value={settings ?? null}>
<LoadingProvider value={isLoading || isMutating}>
<form onSubmit={form.onSubmit(submit)} style={{ position: "relative" }}>
<LoadingOverlay visible={settings === undefined}></LoadingOverlay>
<FormContext.Provider value={form}>
<Container size="xl" mx={0}>
{children}
</Container>
</FormContext.Provider>
<Space h="md" />
<Divider></Divider>
<Space h="md" />
<Group position="right">
<Button
type="submit"
disabled={totalStagedCount === 0}
loading={isMutating}
>
Save
</Button>
<Button
onClick={() => {
callbackModal(true);
}}
>
Close
</Button>
</Group>
</form>
</LoadingProvider>
</SettingsProvider>
);
};
export default LayoutModal;

View File

@ -65,6 +65,7 @@ export const URLTestButton: FunctionComponent<{
export * from "./Card";
export * from "./Layout";
export { default as Layout } from "./Layout";
export { default as LayoutModal } from "./LayoutModal";
export * from "./Message";
export * from "./Section";
export * from "./collapse";

View File

@ -1,10 +1,17 @@
import { useDeleteLogs, useSystemLogs } from "@/apis/hooks";
import { useDeleteLogs, useSystemLogs, useSystemSettings } from "@/apis/hooks";
import { Toolbox } from "@/components";
import { QueryOverlay } from "@/components/async";
import { Check, LayoutModal, Message, Text } from "@/pages/Settings/components";
import { Environment } from "@/utilities";
import { faDownload, faSync, faTrash } from "@fortawesome/free-solid-svg-icons";
import { Container, Group } from "@mantine/core";
import {
faDownload,
faFilter,
faSync,
faTrash,
} from "@fortawesome/free-solid-svg-icons";
import { Badge, Container, Group, Stack } from "@mantine/core";
import { useDocumentTitle } from "@mantine/hooks";
import { useModals } from "@mantine/modals";
import { FunctionComponent, useCallback } from "react";
import Table from "./table";
@ -20,6 +27,61 @@ const SystemLogsView: FunctionComponent = () => {
useDocumentTitle("Logs - Bazarr (System)");
const { data: settings } = useSystemSettings();
const modals = useModals();
const suffix = () => {
const include = settings?.log.include_filter;
const exclude = settings?.log.exclude_filter;
const includeIndex = include !== "" && include !== undefined ? 1 : 0;
const excludeIndex = exclude !== "" && exclude !== undefined ? 1 : 0;
const filters = [
["", "I"],
["E", "I/E"],
];
const filterStr = filters[excludeIndex][includeIndex];
const debugStr = settings?.general.debug ? "Debug" : "";
const spaceStr = debugStr !== "" && filterStr !== "" ? " " : "";
const suffixStr = debugStr + spaceStr + filterStr;
return suffixStr;
};
const openFilterModal = () => {
const callbackModal = (close: boolean) => {
if (close) {
modals.closeModal(id);
}
};
const id = modals.openModal({
title: "Set Log Debug and Filter Options",
children: (
<LayoutModal callbackModal={callbackModal}>
<Stack>
<Check label="Debug" settingKey="settings-general-debug"></Check>
<Message>Debug logging should only be enabled temporarily</Message>
<Text
label="Include Filter"
settingKey="settings-log-include_filter"
></Text>
<Text
label="Exclude Filter"
settingKey="settings-log-exclude_filter"
></Text>
<Check
label="Use Regular Expressions (Regex)"
settingKey="settings-log-use_regex"
></Check>
<Check
label="Ignore Case"
settingKey="settings-log-ignore_case"
></Check>
</Stack>
</LayoutModal>
),
});
};
return (
<Container fluid px={0}>
<QueryOverlay result={logs}>
@ -42,6 +104,22 @@ const SystemLogsView: FunctionComponent = () => {
>
Empty
</Toolbox.Button>
<Toolbox.Button
loading={isLoading}
icon={faFilter}
onClick={openFilterModal}
rightIcon={
suffix() !== "" ? (
<Badge size="xs" radius="sm">
{suffix()}
</Badge>
) : (
<></>
)
}
>
Filter
</Toolbox.Button>
</Group>
</Toolbox>
<Table logs={data ?? []}></Table>

View File

@ -1,6 +1,7 @@
/* eslint-disable @typescript-eslint/no-empty-interface */
interface Settings {
general: Settings.General;
log: Settings.Log;
proxy: Settings.Proxy;
auth: Settings.Auth;
subsync: Settings.Subsync;
@ -82,6 +83,13 @@ declare namespace Settings {
wanted_search_frequency_movie: number;
}
interface Log {
include_filter: string;
exclude_filter: string;
ignore_case: boolean;
use_regex: boolean;
}
interface Proxy {
exclude: string[];
type?: string;

View File

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .avistaz_network import AvistazNetworkProviderBase
class AvistazProvider(AvistazNetworkProviderBase):
"""AvistaZ.to Provider."""
server_url = 'https://avistaz.to/'
provider_name = 'avistaz'

View File

@ -0,0 +1,373 @@
import logging
import time
from http.cookies import SimpleCookie
from random import randint
import pycountry
from requests.cookies import RequestsCookieJar
from subliminal.exceptions import AuthenticationError
from subliminal.providers import ParserBeautifulSoup
from subliminal_patch.http import RetryingCFSession
from subliminal_patch.pitcher import store_verification
from subliminal_patch.providers import Provider
from subliminal_patch.subtitle import Subtitle
from subzero.language import Language
from .utils import get_archive_from_bytes, get_subtitle_from_archive, FIRST_THOUSAND_OR_SO_USER_AGENTS as AGENT_LIST
logger = logging.getLogger(__name__)
supported_languages_names = [
"Abkhazian",
"Afar",
"Afrikaans",
"Akan",
"Albanian",
"Amharic",
"Arabic",
"Aragonese",
"Armenian",
"Assamese",
"Avaric",
"Avestan",
"Aymara",
"Azerbaijani",
"Bambara",
"Bashkir",
"Basque",
"Belarusian",
"Bengali",
"Bihari languages",
"Bislama",
"Bokmål, Norwegian",
"Bosnian",
"Brazilian Portuguese",
"Breton",
"Bulgarian",
"Burmese",
"Cantonese",
"Catalan",
"Central Khmer",
"Chamorro",
"Chechen",
"Chichewa",
"Chinese",
"Church Slavic",
"Chuvash",
"Cornish",
"Corsican",
"Cree",
"Croatian",
"Czech",
"Danish",
"Dhivehi",
"Dutch",
"Dzongkha",
"English",
"Esperanto",
"Estonian",
"Ewe",
"Faroese",
"Fijian",
"Filipino",
"Finnish",
"French",
"Fulah",
"Gaelic",
"Galician",
"Ganda",
"Georgian",
"German",
"Greek",
"Guarani",
"Gujarati",
"Haitian",
"Hausa",
"Hebrew",
"Herero",
"Hindi",
"Hiri Motu",
"Hungarian",
"Icelandic",
"Ido",
"Igbo",
"Indonesian",
"Interlingua",
"Interlingue",
"Inuktitut",
"Inupiaq",
"Irish",
"Italian",
"Japanese",
"Javanese",
"Kalaallisut",
"Kannada",
"Kanuri",
"Kashmiri",
"Kazakh",
"Kikuyu",
"Kinyarwanda",
"Kirghiz",
"Komi",
"Kongo",
"Korean",
"Kuanyama",
"Kurdish",
"Lao",
"Latin",
"Latvian",
"Limburgan",
"Lingala",
"Lithuanian",
"Luba-Katanga",
"Luxembourgish",
"Macedonian",
"Malagasy",
"Malay",
"Malayalam",
"Maltese",
"Mandarin",
"Manx",
"Maori",
"Marathi",
"Marshallese",
"Mongolian",
"Moore",
"Nauru",
"Navajo",
"Ndebele, North",
"Ndebele, South",
"Ndonga",
"Nepali",
"Northern Sami",
"Norwegian",
"Norwegian Nynorsk",
"Occitan (post 1500)",
"Ojibwa",
"Oriya",
"Oromo",
"Ossetian",
"Pali",
"Panjabi",
"Persian",
"Polish",
"Portuguese",
"Pushto",
"Quechua",
"Romanian",
"Romansh",
"Rundi",
"Russian",
"Samoan",
"Sango",
"Sanskrit",
"Sardinian",
"Serbian",
"Shona",
"Sichuan Yi",
"Sindhi",
"Sinhala",
"Slovak",
"Slovenian",
"Somali",
"Sotho, Southern",
"Spanish",
"Sundanese",
"Swahili",
"Swati",
"Swedish",
"Tagalog",
"Tahitian",
"Tajik",
"Tamil",
"Tatar",
"Telugu",
"Thai",
"Tibetan",
"Tigrinya",
"Tongan",
"Tsonga",
"Tswana",
"Turkish",
"Turkmen",
"Twi",
"Uighur",
"Ukrainian",
"Urdu",
"Uzbek",
"Venda",
"Vietnamese",
"Volapük",
"Walloon",
"Welsh",
"Western Frisian",
"Wolof",
"Xhosa",
"Yiddish",
"Yoruba",
"Zhuang",
"Zulu"
]
class AvistazNetworkSubtitle(Subtitle):
"""AvistaZ.to Subtitle."""
provider_name = None
def __init__(self, provider_name, page_link, download_link, language, video, filename, release, uploader):
super().__init__(language, page_link=page_link)
self.provider_name = provider_name
self.hearing_impaired = None
self.language = language
self.filename = filename
self.release_info = release
self.page_link = page_link
self.download_link = download_link
self.video = video
self.matches = None
self.content = None
self.uploader = uploader
self.encoding = None
@property
def id(self):
return self.filename
def get_matches(self, video):
# we download subtitles directly from the
# release page, so it's always a perfect match
self.matches = {'hash'}
return self.matches
def lookup_lang(name):
try:
return Language(pycountry.languages.lookup(name).alpha_3)
except:
return None
class AvistazNetworkProviderBase(Provider):
"""AvistaZ Network base provider"""
subtitle_class = AvistazNetworkSubtitle
languages = set(filter(None, map(lookup_lang, supported_languages_names)))
languages.update(set(Language.rebuild(L, hi=True) for L in languages))
server_url = None
provider_name = None
hash_verifiable = True
def __init__(self, cookies, user_agent=None):
self.session = None
self.cookies = cookies
self.user_agent = user_agent
def initialize(self):
self.session = RetryingCFSession()
if self.user_agent:
self.session.headers['User-Agent'] = self.user_agent
else:
self.session.headers['User-Agent'] = AGENT_LIST[randint(0, len(AGENT_LIST) - 1)]
if self.cookies:
self.session.cookies = RequestsCookieJar()
simple_cookie = SimpleCookie()
simple_cookie.load(self.cookies)
for k, v in simple_cookie.items():
self.session.cookies.set(k, v.value)
rr = self.session.get(self.server_url + 'rules', allow_redirects=False, timeout=10,
headers={"Referer": self.server_url})
if rr.status_code in [302, 404, 403]:
logger.info('Cookies expired')
raise AuthenticationError("cookies not valid anymore")
store_verification(self.provider_name, self.session)
logger.debug('Cookies valid')
time.sleep(2)
return True
def terminate(self):
self.session.close()
def list_subtitles(self, video, languages):
if video.info_url is None or not video.info_url.startswith(self.server_url):
logger.debug('%s not downloaded from %s. Skipped', video, self.server_url)
return []
html = self._query_info_url(video.info_url)
if html is None:
logger.debug('%s release page not found. Release might have been removed', video)
return []
release = self._parse_release_table(html)
if release['Subtitles'].table is None:
logger.debug('No subtitles found for %s', video)
return []
subtitle_columns = list(map(lambda x: x.get_text(), release['Subtitles'].thead.find_all('th')))
subtitles = []
for row in release['Subtitles'].tbody.find_all('tr', recursive=False):
subtitle_cols = self._parse_subtitle_row(row, subtitle_columns)
release_name = release['Title'].get_text().strip()
lang = lookup_lang(subtitle_cols['Language'].get_text().strip())
download_link = subtitle_cols['Download'].a['href']
uploader_name = subtitle_cols['Uploader'].get_text().strip()
if lang not in languages:
continue
subtitles.append(self.subtitle_class(
provider_name=self.provider_name,
page_link=video.info_url,
download_link=download_link,
language=lang,
video=video,
filename=download_link.split('/')[-1],
release=release_name,
uploader=uploader_name,
))
return subtitles
def _query_info_url(self, info_url):
response = self.session.get(info_url, timeout=30)
if response.status_code == 404:
return None
else:
response.raise_for_status()
return response.content.decode('utf-8', 'ignore')
def _parse_subtitle_row(self, row, subtitle_columns):
columns = {}
for i, data in enumerate(row.find_all('td', recursive=False)):
columns[subtitle_columns[i]] = data
return columns
def _parse_release_table(self, html):
release_data_table = (ParserBeautifulSoup(html, ['html.parser'])
.select_one('#content-area > div:nth-child(4) > div.table-responsive > table > tbody'))
rows = {}
for tr in release_data_table.find_all('tr', recursive=False):
rows[tr.td.get_text()] = tr.select_one('td:nth-child(2)', recursive=False)
return rows
def download_subtitle(self, subtitle):
response = self.session.get(subtitle.download_link)
response.raise_for_status()
if subtitle.filename.endswith((".zip", ".rar")):
archive = get_archive_from_bytes(response.content)
subtitle.content = get_subtitle_from_archive(
archive, episode=subtitle.video.episode
)
else:
subtitle.content = response.content
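For reference, the cookie string pasted into the provider settings is parsed in initialize() above with the standard library's SimpleCookie; a standalone illustration with placeholder cookie names and values:

from http.cookies import SimpleCookie

simple_cookie = SimpleCookie()
simple_cookie.load("PHPSESSID=abc123; remember_web=xyz")  # placeholder values
print({k: v.value for k, v in simple_cookie.items()})
# -> {'PHPSESSID': 'abc123', 'remember_web': 'xyz'}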

View File

@ -0,0 +1,9 @@
from __future__ import absolute_import
from subliminal_patch.providers.avistaz_network import AvistazNetworkProviderBase
class CinemazProvider(AvistazNetworkProviderBase):
"""CinemaZ.to Provider."""
server_url = 'https://cinemaz.to/'
provider_name = 'cinemaz'

View File

@ -85,7 +85,8 @@ class EmbeddedSubtitlesProvider(Provider):
ffmpeg_path=None,
hi_fallback=False,
timeout=600,
unknown_as_english=False,
unknown_as_fallback=False,
fallback_lang="en",
):
self._included_codecs = set(included_codecs or _ALLOWED_CODECS)
@ -97,7 +98,8 @@ class EmbeddedSubtitlesProvider(Provider):
cache_dir or tempfile.gettempdir(), self.__class__.__name__.lower()
)
self._hi_fallback = hi_fallback
self._unknown_as_english = unknown_as_english
self._unknown_as_fallback = unknown_as_fallback
self._fallback_lang = fallback_lang
self._cached_paths = {}
self._timeout = int(timeout)
@ -110,7 +112,7 @@ class EmbeddedSubtitlesProvider(Provider):
# Default is True
container.FFMPEG_STATS = False
tags.LANGUAGE_FALLBACK = "en" if self._unknown_as_english else None
tags.LANGUAGE_FALLBACK = self._fallback_lang if self._unknown_as_fallback and self._fallback_lang else None
logger.debug("Language fallback set: %s", tags.LANGUAGE_FALLBACK)
def initialize(self):

View File

@ -1,13 +1,13 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from json import JSONDecodeError
import logging
import random
import re
import time
from requests import Session
from subliminal import __short_version__
from subliminal.providers import ParserBeautifulSoup
from subliminal.video import Episode
from subliminal.video import Movie
from subliminal_patch.exceptions import APIThrottled
@ -15,6 +15,7 @@ from subliminal_patch.providers import Provider
from subliminal_patch.providers.utils import get_archive_from_bytes
from subliminal_patch.providers.utils import get_subtitle_from_archive
from subliminal_patch.providers.utils import update_matches
from subliminal_patch.providers.utils import USER_AGENTS
from subliminal_patch.subtitle import Subtitle
from subzero.language import Language
@ -40,9 +41,7 @@ _SEASON_NUM_RE = re.compile(
r"(s|(season|temporada)\s)(?P<x>\d{1,2})", flags=re.IGNORECASE
)
_EPISODE_YEAR_RE = re.compile(r"\((?P<x>(19\d{2}|20[0-2]\d))\)")
_UNSUPPORTED_RE = re.compile(
r"(\)?\d{4}\)?|[sS]\d{1,2})\s.{,3}(extras|forzado(s)?|forced)", flags=re.IGNORECASE
)
_UNSUPPORTED_RE = re.compile(r"(extras|forzado(s)?|forced)\s?$", flags=re.IGNORECASE)
logger = logging.getLogger(__name__)
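A quick check of what the relaxed _UNSUPPORTED_RE pattern matches; the titles are made up:

import re

_UNSUPPORTED_RE = re.compile(r"(extras|forzado(s)?|forced)\s?$", flags=re.IGNORECASE)
assert _UNSUPPORTED_RE.search("Dune (2021) Forced")             # trailing 'forced' -> skipped
assert _UNSUPPORTED_RE.search("Breaking Bad S01 extras")        # trailing 'extras' -> skipped
assert not _UNSUPPORTED_RE.search("Forced Perspective (2015)")  # keyword not at the end -> kept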
@ -109,95 +108,122 @@ class SubdivxSubtitlesProvider(Provider):
multi_result_throttle = 2
def __init__(self):
self.session = None
self.session = Session()
def initialize(self):
self.session = Session()
self.session.headers["User-Agent"] = f"Subliminal/{__short_version__}"
# self.session.headers["User-Agent"] = f"Subliminal/{__short_version__}"
self.session.headers["User-Agent"] = random.choice(USER_AGENTS)
self.session.cookies.update({"iduser_cookie": _IDUSER_COOKIE})
def terminate(self):
self.session.close()
def query(self, video, languages):
def _query(self, video, languages):
subtitles = []
if isinstance(video, Episode):
episode = isinstance(video, Episode)
titles = [video.series if episode else video.title]
try:
titles.extend(video.alternative_titles)
except:
pass
else:
titles = titles[:5] # limit alt titles
logger.debug("Titles to look at: %s", titles)
if episode:
# TODO: cache pack queries (TV SHOW S01).
# Too many redundant server calls.
for title in titles:
for query in (
f"{title} S{video.season:02}E{video.episode:02}",
f"{title} S{video.season:02}",
):
subtitles += self._query_results(query, video)
for query in (
f"{video.series} S{video.season:02}E{video.episode:02}",
f"{video.series} S{video.season:02}",
):
subtitles += self._handle_multi_page_search(query, video)
# Try only with series title
if len(subtitles) <= 5:
subtitles += self._handle_multi_page_search(video.series, video, 1)
# Try with episode title as last resort
if not subtitles and video.title != video.series:
subtitles += self._handle_multi_page_search(video.title, video, 1)
else:
for query in (video.title, f"{video.title} ({video.year})"):
subtitles += self._handle_multi_page_search(query, video)
# Second query is a fallback
if subtitles:
# Try only with series title
if len(subtitles) <= 5:
subtitles += self._query_results(title, video)
else:
break
# Try with episode title as last resort
if not subtitles and video.title and video.title != title:
subtitles += self._query_results(video.title, video)
else:
for title in titles:
for query in (title, f"{title} ({video.year})"):
subtitles += self._query_results(query, video)
# Second query is a fallback
if subtitles:
break
return subtitles
def _handle_multi_page_search(self, query, video, max_loops=2):
params = {
"buscar2": query,
"accion": "5",
"masdesc": "",
"subtitulos": "1",
"realiza_b": "1",
"pg": 1,
}
def _query_results(self, query, video):
search_link = f"{_SERVER_URL}/inc/ajax.php"
payload = {"tabla": "resultados", "filtros": "", "buscar": query}
logger.debug("Query: %s", query)
loops = 1
max_loops_not_met = True
response = self.session.post(search_link, data=payload)
while max_loops_not_met:
max_loops_not_met = loops < max_loops
page_subtitles, last_page = self._get_page_subtitles(params, video)
logger.debug("Yielding %d subtitles [loop #%d]", len(page_subtitles), loops)
yield from page_subtitles
if last_page:
logger.debug("Last page for '%s' query. Breaking loop", query)
break
loops += 1
params["pg"] += 1 # search next page
time.sleep(self.multi_result_throttle)
if not max_loops_not_met:
logger.debug("Max loops limit exceeded (%d)", max_loops)
def _get_page_subtitles(self, params, video):
search_link = f"{_SERVER_URL}/index.php"
response = self.session.get(
search_link, params=params, allow_redirects=True, timeout=20
)
try:
page_subtitles, last_page = self._parse_subtitles_page(video, response)
except Exception as error:
logger.error(f"Error parsing subtitles list: {error}")
if response.status_code == 500:
logger.debug(
"Error 500 (probably bad encoding of query causing issue on provider side): %s",
query,
)
return []
return page_subtitles, last_page
# Ensure it was successful
response.raise_for_status()
# Processing the JSON result
subtitles = []
try:
data = response.json()
except JSONDecodeError:
logger.debug("JSONDecodeError: %s", response.content)
return []
title_checker = _check_episode if isinstance(video, Episode) else _check_movie
# Iterate over each subtitle in the response
for item in data["aaData"]:
id = item["id"]
page_link = f"{_SERVER_URL}/descargar.php?id={id}"
title = _clean_title(item["titulo"])
description = item["descripcion"]
uploader = item["nick"]
download_url = f"{_SERVER_URL}/descargar.php?id={id}"
if _UNSUPPORTED_RE.search(title) is not None:
logger.debug("Skipping unsupported subtitles: %s", title)
continue
if not title_checker(video, title):
continue
spain = _SPANISH_RE.search(description.lower()) is not None
language = Language.fromalpha2("es") if spain else Language("spa", "MX")
subtitle = self.subtitle_class(
language, video, page_link, title, description, uploader, download_url
)
logger.debug("Found subtitle %r", subtitle)
subtitles.append(subtitle)
return subtitles
def list_subtitles(self, video, languages):
return self.query(video, languages)
return self._query(video, languages)
def download_subtitle(self, subtitle):
# download the subtitle
@ -220,62 +246,10 @@ class SubdivxSubtitlesProvider(Provider):
if isinstance(subtitle.video, Episode):
episode = subtitle.video.episode
logger.debug("Episode number: %s", episode)
subtitle.content = get_subtitle_from_archive(archive, episode=episode)
def _parse_subtitles_page(self, video, response):
subtitles = []
page_soup = ParserBeautifulSoup(
response.content.decode("utf-8", "ignore"), ["lxml", "html.parser"]
)
title_soups = page_soup.find_all("div", {"id": "menu_detalle_buscador"})
body_soups = page_soup.find_all("div", {"id": "buscador_detalle"})
title_checker = _check_episode if isinstance(video, Episode) else _check_movie
for subtitle in range(0, len(title_soups)):
title_soup, body_soup = title_soups[subtitle], body_soups[subtitle]
# title
title = _clean_title(title_soup.find("a").text)
if _UNSUPPORTED_RE.search(title):
logger.debug("Skipping unsupported subtitles: %s", title)
continue
if not title_checker(video, title):
continue
# Data
datos = body_soup.find("div", {"id": "buscador_detalle_sub_datos"}).text
# Ignore multi-disc and non-srt subtitles
if not any(item in datos for item in ("Cds:</b> 1", "SubRip")):
continue
# description
sub_details = body_soup.find("div", {"id": "buscador_detalle_sub"}).text
description = sub_details.replace(",", " ")
# language
spain = (
"/pais/7.gif" in datos
or _SPANISH_RE.search(description.lower()) is not None
)
language = Language.fromalpha2("es") if spain else Language("spa", "MX")
# uploader
uploader = body_soup.find("a", {"class": "link1"}).text
download_url = _get_download_url(body_soup)
page_link = title_soup.find("a")["href"]
subtitle = self.subtitle_class(
language, video, page_link, title, description, uploader, download_url
)
logger.debug("Found subtitle %r", subtitle)
subtitles.append(subtitle)
return subtitles, len(title_soups) < 100
def _clean_title(title):
"""
@ -288,17 +262,6 @@ def _clean_title(title):
return title
def _get_download_url(data):
try:
return [
a_.get("href")
for a_ in data.find_all("a")
if "bajar.php" in a_.get("href", "n/a")
][0]
except IndexError:
return None
def _check_episode(video, title):
ep_num = _EPISODE_NUM_RE.search(title)
season_num = _SEASON_NUM_RE.search(title)

View File

@ -126,7 +126,7 @@ class TitrariProvider(Provider, ProviderSubtitleArchiveMixin):
video_types = (Episode, Movie)
api_url = 'https://www.titrari.ro/'
# query_advanced_search = 'cautarepreaavansata'
query_advanced_search = "cautarecutare"
query_advanced_search = "maicauta"
def __init__(self):
self.session = None

View File

@ -18,6 +18,8 @@ from subliminal_patch.subtitle import guess_matches
from ._agent_list import FIRST_THOUSAND_OR_SO_USER_AGENTS
USER_AGENTS = FIRST_THOUSAND_OR_SO_USER_AGENTS
logger = logging.getLogger(__name__)

View File

@ -299,7 +299,7 @@ class WhisperAIProvider(Provider):
# tell the user what we are about to do
sub.release_info = f"{sub.task} {language_from_alpha3(sub.audio_language)} audio -> {language_from_alpha3(language.alpha3)} SRT"
logger.debug(f"Whisper ({video.original_path}): {sub.audio_language} -> {language.alpha3} [TASK: {sub.task}]")
logger.debug(f"Whisper query: ({video.original_path}): {sub.audio_language} -> {language.alpha3} [TASK: {sub.task}]")
return sub
@ -319,6 +319,8 @@ class WhisperAIProvider(Provider):
subtitle.content = None
return
logger.debug(f'Audio stream length (in WAV format) is {len(out):,} bytes')
if subtitle.task == "transcribe":
output_language = subtitle.audio_language
else:
@ -334,6 +336,14 @@ class WhisperAIProvider(Provider):
endTime = time.time()
elapsedTime = timedelta(seconds=round(endTime - startTime))
# for debugging, log if anything got returned
subtitle_length = len(r.content)
logger.debug(f'Returned subtitle length is {subtitle_length:,} bytes')
subtitle_length = min(subtitle_length, 1000)
if subtitle_length > 0:
logger.debug(f'First {subtitle_length} bytes of subtitle: {r.content[0:subtitle_length]}')
logger.info(f'Completed WhisperAI {subtitle.task} to {language_from_alpha3(output_language)} in {elapsedTime} for {subtitle.video.original_path}')
subtitle.content = r.content

View File

@ -123,7 +123,7 @@ class YifySubtitlesProvider(Provider):
rating = int(td[0].text)
sub_lang = td[1].text
release = re.sub(r'^\nsubtitle ', '', td[2].text)
page_link = td[2].find('a').get('href')
page_link = urljoin(server_url, td[2].find('a').get('href'))
hi = True if td[3].find('span', {'class': 'hi-subtitle'}) else False
uploader = td[4].text
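The urljoin() change makes relative hrefs absolute while leaving already-absolute ones untouched; illustrative values, not the provider's real URLs:

from urllib.parse import urljoin

server_url = "https://example-yify.test/"
print(urljoin(server_url, "/subtitles/movie-imdb-id"))       # -> https://example-yify.test/subtitles/movie-imdb-id
print(urljoin(server_url, "https://cdn.test/subtitle.zip"))  # -> https://cdn.test/subtitle.zip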

View File

@ -14,6 +14,7 @@ class Video(Video_):
season_fully_aired = None
audio_languages = None
external_subtitle_languages = None
info_url = None
def __init__(
self,
@ -31,6 +32,7 @@ class Video(Video_):
streaming_service=None,
edition=None,
other=None,
info_url=None,
**kwargs
):
super(Video, self).__init__(
@ -54,3 +56,4 @@ class Video(Video_):
self.edition = edition
self.original_path = name
self.other = other
self.info_url = info_url

View File

@ -0,0 +1,28 @@
"""Alter table_languages_profiles.originalFormat type to from bool to int
Revision ID: b183a2ac0dd1
Revises: 30f37e2e15e1
Create Date: 2024-02-16 10:32:39.123456
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b183a2ac0dd1'
down_revision = '30f37e2e15e1'
branch_labels = None
depends_on = None
bind = op.get_context().bind
def upgrade():
if bind.engine.name == 'postgresql':
with op.batch_alter_table('table_languages_profiles') as batch_op:
batch_op.alter_column('originalFormat', type_=sa.Integer())
def downgrade():
pass

View File

@ -35,7 +35,8 @@ def test_get_providers_auth_embeddedsubtitles():
assert isinstance(item["ffprobe_path"], str)
assert isinstance(item["ffmpeg_path"], str)
assert isinstance(item["timeout"], str)
assert isinstance(item["unknown_as_english"], bool)
assert isinstance(item["unknown_as_fallback"], bool)
assert isinstance(item["fallback_lang"], str)
def test_get_providers_auth_karagarga():

View File

@ -126,8 +126,8 @@ def fake_streams():
@pytest.mark.parametrize("tags_", [{}, {"language": "und", "title": "Unknown"}])
def test_list_subtitles_unknown_as_english(mocker, tags_, video_single_language):
with EmbeddedSubtitlesProvider(unknown_as_english=True) as provider:
def test_list_subtitles_unknown_as_fallback(mocker, tags_, video_single_language):
with EmbeddedSubtitlesProvider(unknown_as_fallback=True, fallback_lang="en") as provider:
fake = FFprobeSubtitleStream(
{"index": 3, "codec_name": "subrip", "tags": tags_}
)
@ -141,10 +141,10 @@ def test_list_subtitles_unknown_as_english(mocker, tags_, video_single_language)
assert len(result) == 1
def test_list_subtitles_unknown_as_english_w_real_english_subtitles(
def test_list_subtitles_unknown_as_fallback_w_real_english_subtitles(
video_single_language, mocker
):
with EmbeddedSubtitlesProvider(unknown_as_english=True) as provider:
with EmbeddedSubtitlesProvider(unknown_as_fallback=True, fallback_lang="en") as provider:
fakes = [
FFprobeSubtitleStream(
{"index": 3, "codec_name": "subrip", "tags": {"language": "und"}}
@ -164,8 +164,8 @@ def test_list_subtitles_unknown_as_english_w_real_english_subtitles(
@pytest.mark.parametrize("tags_", [{}, {"language": "und", "title": "Unknown"}])
def test_list_subtitles_unknown_as_english_disabled(tags_):
with EmbeddedSubtitlesProvider(unknown_as_english=False):
def test_list_subtitles_unknown_as_fallback_disabled(tags_):
with EmbeddedSubtitlesProvider(unknown_as_fallback=False, fallback_lang="en"):
with pytest.raises(LanguageNotFound):
assert FFprobeSubtitleStream(
{"index": 3, "codec_name": "subrip", "tags": tags_}

View File

@ -35,14 +35,6 @@ def test_list_subtitles_movie_with_one_difference_year(movies):
assert provider.list_subtitles(item, {Language("spa", "MX")})
def test_handle_multi_page_search(episodes):
with SubdivxSubtitlesProvider() as provider:
for _ in provider._handle_multi_page_search(
"Game Of Thrones", episodes["got_s03e10"]
):
pass
@pytest.mark.parametrize(
"episode_key,expected", [("breaking_bad_s01e01", 15), ("inexistent", 0)]
)
@ -106,7 +98,7 @@ def test_download_subtitle(movies):
"Dune",
"",
"",
"https://www.subdivx.com/bajar.php?id=631101&u=9",
"https://www.subdivx.com/descargar.php?id=631101",
)
with SubdivxSubtitlesProvider() as provider:
provider.download_subtitle(subtitle)
@ -124,7 +116,7 @@ def test_download_subtitle_episode_pack(episodes):
"Breaking Bad S01E01-07",
"Son los del torrent que vienen Formato / Dimensiones 624x352 / Tamaño 351 MB -Incluye los Torrents-",
"",
"https://www.subdivx.com/bajar.php?id=365610&u=7",
"https://www.subdivx.com/descargar.php?id=365610",
)
with SubdivxSubtitlesProvider() as provider:
provider.download_subtitle(subtitle)
@ -168,7 +160,7 @@ def test_subtitle_matches(video):
"otras seguramente, gracias por sus comentarios, saludos."
),
"tolobich",
"https://www.subdivx.com/bajar.php?id=635101&u=9",
"https://www.subdivx.com/descargar.php?id=635101",
)
matches = subtitle.get_matches(video)