mirror of https://github.com/clinton-hall/nzbToMedia.git
synced 2025-08-14 02:26:53 -07:00

Fix lint errors

This commit is contained in:
parent bea41c3f2e
commit b75d448ff7

44 changed files with 835 additions and 241 deletions
@@ -23,3 +23,16 @@ repos:
 # rev: v2.0.0
 # hooks:
 # - id: autopep8
+- repo: local
+  hooks:
+    - id: pylint
+      name: pylint
+      entry: pylint
+      language: system
+      types: [python]
+      args:
+        [
+          "-rn", # Only display messages
+          "-sn", # Disable score
+          "--rcfile=.pylintrc.ini", # Link to your config file
+        ]
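Because the hook lives in a local repo, pre-commit does not build an isolated pylint environment; it simply runs whatever pylint is on PATH. Assuming pre-commit and pylint are already installed in the project environment, the hook can be exercised with:

    pip install pre-commit pylint
    pre-commit install
    pre-commit run pylint --all-files

Since entry is the bare pylint command, configuration comes from the explicit --rcfile=.pylintrc.ini argument rather than pylint's usual config discovery.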
.pylintrc.ini (new file, 528 lines)
@@ -0,0 +1,528 @@
[MAIN]
load-plugins=
    pylint.extensions.broad_try_clause,
    pylint.extensions.code_style,
    pylint.extensions.emptystring,
    pylint.extensions.comparetozero,
    pylint.extensions.comparison_placement,
    pylint.extensions.confusing_elif,
    pylint.extensions.for_any_all,
    pylint.extensions.consider_ternary_expression,
    pylint.extensions.bad_builtin,
    pylint.extensions.mccabe,
    ; pylint.extensions.dict_init_mutate,
    pylint.extensions.docstyle,
    ; pylint.extensions.dunder,
    pylint.extensions.check_elif,
    pylint.extensions.empty_comment,
    pylint.extensions.eq_without_hash,
    pylint.extensions.private_import,
    ; pylint.extensions.magic_value,
    pylint.extensions.redefined_variable_type,
    pylint.extensions.no_self_use,
    pylint.extensions.overlapping_exceptions,
    pylint.extensions.docparams,
    pylint.extensions.redefined_loop_name,
    pylint.extensions.set_membership,
    pylint.extensions.typing,
    pylint.extensions.while_used,

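Several of the plugins loaded above drive the code changes later in this commit. In particular, pylint.extensions.set_membership adds use-set-for-membership (R6201), which is why so many `x in [...]` tests in the diffs below become `x in {...}`. A minimal sketch of what it flags (the helper names are made up for illustration):

    # Flagged by use-set-for-membership (R6201): membership test against a list literal.
    def is_tv_section(section_name):
        return section_name in ['SickBeard', 'SiCKRAGE', 'NzbDrone', 'Sonarr']

    # Preferred form: a set literal states intent and gives constant-time lookups.
    def is_tv_section_preferred(section_name):
        return section_name in {'SickBeard', 'SiCKRAGE', 'NzbDrone', 'Sonarr'}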
[MESSAGES CONTROL]
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"

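The same switches also work inside this file; the command-line examples in the comment translate directly into config keys. As a sketch (not what this project uses), a config that runs only the similarities checker would read:

    [MESSAGES CONTROL]
    disable=all
    enable=similarities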
; # --- FATAL ---------
; F0001, # fatal
; F0002, # astroid-error
; F0010, # parse-error
; F0011, # config-parse-error
; F0202, # method-check-failed

; # --- ERROR ---------
; E0001, # syntax-error
; E0011, # unrecognized-inline-option
; E0013, # bad-plugin-value
; E0014, # bad-configuration-SECTION
; E0015, # unrecognized-option
; E0100, # init-is-generator
; E0101, # return-in-init
; E0102, # function-redefined
; E0103, # not-in-loop
; E0104, # return-outside-function
; E0105, # yield-outside-function
; E0106, # return-arg-in-generator
; E0107, # nonexistent-operator
; E0108, # duplicate-argument-name
; E0110, # abstract-class-instantiated
; E0111, # bad-reversed-sequence
; E0112, # too-many-star-expressions
; E0113, # invalid-star-assignment-target
; E0114, # star-needs-assignment-target
; E0115, # nonlocal-and-global
; E0116, # continue-in-finally
; E0117, # nonlocal-without-binding
; E0118, # used-prior-global-declaration
; E0119, # misplaced-format-function
; E0202, # method-hidden
; E0203, # access-member-before-definition
; E0211, # no-method-argument
; E0213, # no-self-argument
; E0236, # invalid-slots-object
; E0237, # assigning-non-slot
; E0238, # invalid-slots
; E0239, # inherit-non-class
; E0240, # inconsistent-mro
; E0241, # duplicate-bases
; E0242, # class-variable-slots-conflict
; E0243, # invalid-class-object
; E0244, # invalid-enum-extension
; E0301, # non-iterator-returned
; E0302, # unexpected-special-method-signature
; E0303, # invalid-length-returned
; E0304, # invalid-bool-returned
; E0305, # invalid-index-returned
; E0306, # invalid-repr-returned
; E0307, # invalid-str-returned
; E0308, # invalid-bytes-returned
; E0309, # invalid-hash-returned
; E0310, # invalid-length-hint-returned
; E0311, # invalid-format-returned
; E0312, # invalid-getnewargs-returned
; E0313, # invalid-getnewargs-ex-returned
; E0401, # import-error
; E0402, # relative-beyond-top-level
; E0601, # used-before-assignment
; E0602, # undefined-variable
; E0603, # undefined-all-variable
; E0604, # invalid-all-object
; E0605, # invalid-all-format
; E0611, # no-name-in-module
; E0633, # unpacking-non-sequence
; E0643, # potential-index-error
; E0701, # bad-except-order
; E0702, # raising-bad-type
; E0704, # misplaced-bare-raise
; E0705, # bad-exception-cause
; E0710, # raising-non-exception
; E0711, # notimplemented-raised
; E0712, # catching-non-exception
; E1003, # bad-super-call
; E1101, # no-member
; E1102, # not-callable
; E1111, # assignment-from-no-return
; E1120, # no-value-for-parameter
; E1121, # too-many-function-args
; E1123, # unexpected-keyword-arg
; E1124, # redundant-keyword-arg
; E1125, # missing-kwoa
; E1126, # invalid-sequence-index
; E1127, # invalid-slice-index
; E1128, # assignment-from-none
; E1129, # not-context-manager
; E1130, # invalid-unary-operand-type
; E1131, # unsupported-binary-operation
; E1132, # repeated-keyword
; E1133, # not-an-iterable
; E1134, # not-a-mapping
; E1135, # unsupported-membership-test
; E1136, # unsubscriptable-object
; E1137, # unsupported-assignment-operation
; E1138, # unsupported-delete-operation
; E1139, # invalid-metaclass
; E1141, # dict-iter-missing-items
; E1142, # await-outside-async
; E1143, # unhashable-member
; E1144, # invalid-slice-step
; E1200, # logging-unsupported-format
; E1201, # logging-format-truncated
; E1205, # logging-too-many-args
; E1206, # logging-too-few-args
; E1300, # bad-format-character
; E1301, # truncated-format-string
; E1302, # mixed-format-string
; E1303, # format-needs-mapping
; E1304, # missing-format-string-key
; E1305, # too-many-format-args
; E1306, # too-few-format-args
; E1307, # bad-string-format-type
; E1310, # bad-str-strip-call
; E1507, # invalid-envvar-value
; E1519, # singledispatch-method
; E1520, # singledispatchmethod-function
; E1700, # yield-inside-async-function
; E1701, # not-async-context-manager
; E2501, # invalid-unicode-codec
; E2502, # bidirectional-unicode
; E2510, # invalid-character-backspace
; E2511, # invalid-character-carriage-return
; E2512, # invalid-character-sub
; E2513, # invalid-character-esc
; E2514, # invalid-character-nul
; E2515, # invalid-character-zero-width-space
; E4702, # modified-iterating-dict
; E4703, # modified-iterating-set
; E6004, # broken-noreturn
; E6005, # broken-collections-callable

; # --- WARNING -------
; W0012, # unknown-option-value
; W0101, # unreachable
; W0102, # dangerous-default-value
; W0104, # pointless-statement
; W0105, # pointless-string-statement
; W0106, # expression-not-assigned
; W0107, # unnecessary-pass
; W0108, # unnecessary-lambda
; W0109, # duplicate-key
; W0120, # useless-else-on-loop
; W0122, # exec-used
; W0123, # eval-used
; W0124, # confusing-with-statement
; W0125, # using-constant-test
; W0126, # missing-parentheses-for-call-in-test
; W0127, # self-assigning-variable
; W0128, # redeclared-assigned-name
; W0129, # assert-on-string-literal
; W0130, # duplicate-value
; W0131, # named-expr-without-context
; W0141, # bad-builtin
; W0143, # comparison-with-callable
; W0149, # while-used
; W0150, # lost-exception
; W0160, # consider-ternary-expression
; W0177, # nan-comparison
; W0199, # assert-on-tuple
; W0201, # attribute-defined-outside-init
; W0211, # bad-staticmethod-argument
; W0212, # protected-access
; W0221, # arguments-differ
; W0222, # signature-differs
; W0223, # abstract-method
; W0231, # super-init-not-called
; W0233, # non-parent-init-called
; W0236, # invalid-overridden-method
; W0237, # arguments-renamed
; W0238, # unused-private-member
; W0239, # overridden-final-method
; W0240, # subclassed-final-class
; W0244, # redefined-slots-in-subclass
; W0245, # super-without-brackets
; W0246, # useless-parent-delegation
; W0301, # unnecessary-semicolon
; W0311, # bad-indentation
; W0401, # wildcard-import
; W0404, # reimported
; W0406, # import-self
; W0407, # preferred-module
; W0410, # misplaced-future
; W0416, # shadowed-import
; W0511, # fixme
; W0601, # global-variable-undefined
; W0602, # global-variable-not-assigned
; W0603, # global-statement
; W0604, # global-at-module-level
; W0611, # unused-import
; W0612, # unused-variable
; W0613, # unused-argument
; W0614, # unused-wildcard-import
; W0621, # redefined-outer-name
; W0622, # redefined-builtin
; W0631, # undefined-loop-variable
; W0632, # unbalanced-tuple-unpacking
; W0640, # cell-var-from-loop
; W0641, # possibly-unused-variable
; W0642, # self-cls-assignment
; W0644, # unbalanced-dict-unpacking
; W0702, # bare-except
; W0705, # duplicate-except
; W0706, # try-except-raise
; W0707, # raise-missing-from
; W0711, # binary-op-exception
; W0714, # overlapping-except
; W0715, # raising-format-tuple
; W0716, # wrong-exception-operation
; W0717, # too-many-try-statements
; W0718, # broad-exception-caught
; W0719, # broad-exception-raised
; W1113, # keyword-arg-before-vararg
; W1114, # arguments-out-of-order
; W1115, # non-str-assignment-to-dunder-name
; W1116, # isinstance-second-argument-not-valid-type
; W1201, # logging-not-lazy
; W1202, # logging-format-interpolation
; W1203, # logging-fstring-interpolation
; W1300, # bad-format-string-key
; W1301, # unused-format-string-key
; W1302, # bad-format-string
; W1303, # missing-format-argument-key
; W1304, # unused-format-string-argument
; W1305, # format-combined-specification
; W1306, # missing-format-attribute
; W1307, # invalid-format-index
; W1308, # duplicate-string-formatting-argument
; W1309, # f-string-without-interpolation
; W1310, # format-string-without-interpolation
; W1401, # anomalous-backslash-in-string
; W1402, # anomalous-unicode-escape-in-string
; W1404, # implicit-str-concat
; W1405, # inconsistent-quotes
; W1406, # redundant-u-string-prefix
; W1501, # bad-open-mode
; W1502, # boolean-datetime
; W1503, # redundant-unittest-assert
; W1506, # bad-thread-instantiation
; W1507, # shallow-copy-environ
; W1508, # invalid-envvar-default
; W1509, # subprocess-popen-preexec-fn
; W1510, # subprocess-run-check
; W1514, # unspecified-encoding
; W1515, # forgotten-debug-statement
; W1518, # method-cache-max-size-none
; W1641, # eq-without-hash
; W2101, # useless-with-lock
; W2301, # unnecessary-ellipsis
; W2402, # non-ascii-file-name
; W2601, # using-f-string-in-unsupported-version
; W2602, # using-final-decorator-in-unsupported-version
; W2901, # redefined-loop-name
; W3101, # missing-timeout
; W3201, # bad-dunder-name
; W3301, # nested-min-max
; W4701, # modified-iterating-list
; W4901, # deprecated-module
; W4902, # deprecated-method
; W4903, # deprecated-argument
; W4904, # deprecated-class
; W4905, # deprecated-decorator
; W6001, # deprecated-typing-alias
; W9005, # multiple-constructor-doc
; W9006, # missing-raises-doc
; W9008, # redundant-returns-doc
; W9010, # redundant-yields-doc
; W9011, # missing-return-doc
; W9012, # missing-return-type-doc
; W9013, # missing-yield-doc
; W9015, # missing-param-doc
; W9014, # missing-yield-type-doc
; W9016, # missing-type-doc
; W9017, # differing-param-doc
; W9018, # differing-type-doc
; W9019, # useless-param-doc
; W9020, # useless-type-doc
; W9021, # missing-any-param-doc

; # --- CONVENTION ----
; C0103, # invalid-name
; C0104, # disallowed-name
; C0105, # typevar-name-incorrect-variance
; C0112, # empty-docstring
; C0113, # unneeded-not
; C0114, # missing-module-docstring
; C0115, # missing-class-docstring
; C0116, # missing-function-docstring
; C0121, # singleton-comparison
; C0123, # unidiomatic-typecheck
; C0131, # typevar-double-variance
; C0132, # typevar-name-mismatch
; C0198, # bad-docstring-quotes
; C0199, # docstring-first-line-empty
; C0200, # consider-using-enumerate
; C0201, # consider-iterating-dictionary
; C0202, # bad-classmethod-argument
; C0203, # bad-mcs-method-argument
; C0204, # bad-mcs-classmethod-argument
; C0205, # single-string-used-for-slots
; C0206, # consider-using-dict-items
; C0207, # use-maxsplit-arg
; C0208, # use-sequence-for-iteration
; C0209, # consider-using-f-string
; C0301, # line-too-long
; C0302, # too-many-lines
; C0303, # trailing-whitespace
; C0304, # missing-final-newline
; C0305, # trailing-newlines
; C0321, # multiple-statements
; C0325, # superfluous-parens
; C0327, # mixed-line-endings
; C0328, # unexpected-line-ending-format
; C0401, # wrong-spelling-in-comment
; C0402, # wrong-spelling-in-docstring
; C0403, # invalid-characters-in-docstring
; C0410, # multiple-imports
; C0411, # wrong-import-order
; C0412, # ungrouped-imports
; C0413, # wrong-import-position
; C0414, # useless-import-alias
; C0415, # import-outside-toplevel
; C0501, # consider-using-any-or-all
; C1802, # use-implicit-booleaness-not-len
; C1803, # use-implicit-booleaness-not-comparison
; C1901, # compare-to-empty-string
; C2001, # compare-to-zero
; C2201, # misplaced-comparison-constant
; C2401, # non-ascii-name
; C2403, # non-ascii-module-import
; C2503, # bad-file-encoding
; C2701, # import-private-name
; C2801, # unnecessary-dunder-call
; C3001, # unnecessary-lambda-assignment
; C3002, # unnecessary-direct-lambda-call
; C3401, # dict-init-mutate

; # --- REFACTOR ------
; R0022, # useless-option-value
; R0123, # literal-comparison
; R0124, # comparison-with-itself
; R0133, # comparison-of-constants
; R0202, # no-classmethod-decorator
; R0203, # no-staticmethod-decorator
; R0204, # redefined-variable-type
; R0205, # useless-object-inheritance
; R0206, # property-with-parameters
; R0401, # cyclic-import
; R0402, # consider-using-from-import
; R0801, # duplicate-code
; R0901, # too-many-ancestors
; R0902, # too-many-instance-attributes
; R0903, # too-few-public-methods
; R0904, # too-many-public-methods
; R0911, # too-many-return-statements
; R0912, # too-many-branches
; R0913, # too-many-arguments
; R0914, # too-many-locals
; R0915, # too-many-statements
; R0916, # too-many-boolean-expressions
; R1260, # too-complex
; R1701, # consider-merging-isinstance
; R1702, # too-many-nested-blocks
; R1703, # simplifiable-if-statement
; R1704, # redefined-argument-from-local
; R1705, # no-else-return
; R1706, # consider-using-ternary
; R1707, # trailing-comma-tuple
; R1708, # stop-iteration-return
; R1709, # simplify-boolean-expression
; R1710, # inconsistent-return-statements
; R1711, # useless-return
; R1712, # consider-swap-variables
; R1713, # consider-using-join
; R1714, # consider-using-in
; R1715, # consider-using-get
; R1716, # chained-comparison
; R1717, # consider-using-dict-comprehension
; R1718, # consider-using-set-comprehension
; R1719, # simplifiable-if-expression
; R1720, # no-else-raise
; R1721, # unnecessary-comprehension
; R1722, # consider-using-sys-exit
; R1723, # no-else-break
; R1724, # no-else-continue
; R1725, # super-with-arguments
; R1726, # simplifiable-condition
; R1727, # condition-evals-to-constant
; R1728, # consider-using-generator
; R1729, # use-a-generator
; R1730, # consider-using-min-builtin
; R1731, # consider-using-max-builtin
; R1732, # consider-using-with
; R1733, # unnecessary-dict-index-lookup
; R1734, # use-list-literal
; R1735, # use-dict-literal
; R1736, # unnecessary-list-index-lookup
; R2004, # magic-value-comparison
; R2044, # empty-comment
; R5501, # else-if-used
; R5601, # confusing-consecutive-elif
; R6002, # consider-using-alias
; R6003, # consider-alternative-union-syntax
; R6006, # redundant-typehint-argument
; R6101, # consider-using-namedtuple-or-dataclass
; R6102, # consider-using-tuple
; R6103, # consider-using-assignment-expr
; R6104, # consider-using-augmented-assign
; R6201, # use-set-for-membership
; R6301, # no-self-use

; # --- INFORMATION ---
; I0001, # raw-checker-failed
; I0010, # bad-inline-option
; I0011, # locally-disabled
; I0013, # file-ignored
; I0020, # suppressed-message
; I0021, # useless-suppression
; I0022, # deprecated-pragma
; I0023, # use-symbolic-message-instead
; I1101, # c-extension-no-member

disable=
    E1101, # no-member

    W0141, # bad-builtin
    W0149, # while-used
    W0160, # consider-ternary-expression
    W0201, # attribute-defined-outside-init
    W0212, # protected-access
    W0511, # fixme
    W0601, # global-variable-undefined
    W0602, # global-variable-not-assigned
    W0603, # global-statement
    W0612, # unused-variable
    W0621, # redefined-outer-name
    W0631, # undefined-loop-variable
    W0703, # broad-except
    W0717, # too-many-try-statements
    W1202, # logging-format-interpolation
    W1203, # logging-fstring-interpolation
    W1404, # implicit-str-concat
    W2901, # redefined-loop-name
    W3101, # missing-timeout
    W6001, # deprecated-typing-alias
    W9016, # missing-type-doc

    C0103, # invalid-name
    C0114, # missing-module-docstring
    C0115, # missing-class-docstring
    C0116, # missing-function-docstring
    C0199, # docstring-first-line-empty
    C0201, # consider-iterating-dictionary
    C0206, # consider-using-dict-items
    C0301, # line-too-long
    C0415, # import-outside-toplevel
    C1901, # compare-to-empty-string
    C2001, # compare-to-zero

    R0204, # redefined-variable-type
    R0401, # cyclic-import
    R0801, # duplicate-code
    R0903, # too-few-public-methods
    R0902, # too-many-instance-attributes
    R0911, # too-many-return-statements
    R0912, # too-many-branches
    R0913, # too-many-arguments
    R0914, # too-many-locals
    R0915, # too-many-statements
    R0916, # too-many-boolean-expressions
    R1260, # too-complex
    R1702, # too-many-nested-blocks
    R1704, # redefined-argument-from-local
    R1710, # inconsistent-return-statements
    R5501, # else-if-used
    R5601, # confusing-consecutive-elif
    R6003, # consider-alternative-union-syntax
    R6102, # consider-using-tuple
    R6103, # consider-using-assignment-expr

    I0011, # locally-disabled
    I0020, # suppressed-message

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
# multiple times (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=
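Checks disabled globally above stay off everywhere; individual call sites can also opt out of an enabled check with an inline pragma. A small, self-contained sketch (the subprocess call is illustrative, not taken from this repo):

    import subprocess

    # Without check=..., pylint reports subprocess-run-check (W1510);
    # the trailing pragma silences it for this one line only.
    result = subprocess.run(['ffprobe', '-version'])  # pylint: disable=subprocess-run-check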
@@ -60,7 +60,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
     log.debug(f'Determined Directory: {input_directory} | Name: {input_name} | Category: {input_category}')

-    # auto-detect section
+    # auto-detect SECTION
     section = nzb2media.CFG.findsection(input_category).isenabled()
     if section is None:  # Check for user_scripts for 'ALL' and 'UNCAT'
         if usercat in nzb2media.CATEGORIES:

@@ -122,10 +122,9 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
     log.debug(f'Scanning files in directory: {input_directory}')

-    if section_name in ['HeadPhones', 'Lidarr']:
-        nzb2media.NOFLATTEN.extend(
-            input_category,
-        )  # Make sure we preserve folder structure for HeadPhones.
+    if section_name in {'HeadPhones', 'Lidarr'}:
+        # Make sure we preserve folder structure for HeadPhones.
+        nzb2media.NOFLATTEN.extend(input_category)

     now = datetime.datetime.now()

@@ -138,10 +137,10 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
         log.debug(f'Found 1 file to process: {input_directory}')
     else:
         log.debug(f'Found {len(input_files)} files in {input_directory}')
-    for inputFile in input_files:
-        file_path = os.path.dirname(inputFile)
-        file_name, file_ext = os.path.splitext(os.path.basename(inputFile))
-        full_file_name = os.path.basename(inputFile)
+    for input_file in input_files:
+        file_path = os.path.dirname(input_file)
+        file_name, file_ext = os.path.splitext(os.path.basename(input_file))
+        full_file_name = os.path.basename(input_file)

         target_file = nzb2media.os.path.join(output_destination, full_file_name)
         if input_category in nzb2media.NOFLATTEN:

@@ -152,9 +151,9 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
             log.debug(f'Setting outputDestination to {os.path.dirname(target_file)} to preserve folder structure')
         if root == 1:
             if not found_file:
-                log.debug(f'Looking for {input_name} in: {inputFile}')
+                log.debug(f'Looking for {input_name} in: {input_file}')
             if any([
-                nzb2media.sanitize_name(input_name) in nzb2media.sanitize_name(inputFile),
+                nzb2media.sanitize_name(input_name) in nzb2media.sanitize_name(input_file),
                 nzb2media.sanitize_name(file_name) in nzb2media.sanitize_name(input_name),
             ]):
                 found_file = True

@@ -163,8 +162,8 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
                 continue

         if root == 2:
-            mtime_lapse = now - datetime.datetime.fromtimestamp(os.path.getmtime(inputFile))
-            ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(inputFile))
+            mtime_lapse = now - datetime.datetime.fromtimestamp(os.path.getmtime(input_file))
+            ctime_lapse = now - datetime.datetime.fromtimestamp(os.path.getctime(input_file))

             if not found_file:
                 log.debug('Looking for files with modified/created dates less than 5 minutes old.')

@@ -176,10 +175,10 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp

         if torrent_no_link == 0:
             try:
-                nzb2media.copy_link(inputFile, target_file, nzb2media.USE_LINK)
+                nzb2media.copy_link(input_file, target_file, nzb2media.USE_LINK)
                 nzb2media.remove_read_only(target_file)
             except Exception:
-                log.error(f'Failed to link: {inputFile} to {target_file}')
+                log.error(f'Failed to link: {input_file} to {target_file}')

     input_name, output_destination = convert_to_ascii(input_name, output_destination)

@@ -192,7 +191,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
     nzb2media.flatten(output_destination)

     # Now check if video files exist in destination:
-    if section_name in ['SickBeard', 'SiCKRAGE', 'NzbDrone', 'Sonarr', 'CouchPotato', 'Radarr', 'Watcher3']:
+    if section_name in {'SickBeard', 'SiCKRAGE', 'NzbDrone', 'Sonarr', 'CouchPotato', 'Radarr', 'Watcher3'}:
         num_videos = len(
             nzb2media.list_media_files(output_destination, media=True, audio=False, meta=False, archives=False),
         )

@@ -232,7 +231,7 @@ def process_torrent(input_directory, input_name, input_category, input_hash, inp
         'Mylar': comics.process,
         'Gamez': games.process,
     }
-    if input_hash and section_name in ['SickBeard', 'SiCKRAGE', 'NzbDrone', 'Sonarr']:
+    if input_hash and section_name in {'SickBeard', 'SiCKRAGE', 'NzbDrone', 'Sonarr'}:
         input_hash = input_hash.upper()
     processor = process_map[section_name]
     result = processor(

@@ -358,4 +357,4 @@ def main(args):


 if __name__ == '__main__':
-    exit(main(sys.argv))
+    sys.exit(main(sys.argv))
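The last hunk above swaps the exit() builtin for sys.exit(). exit() is a convenience injected by the site module for interactive sessions and may be missing (for example under python -S or in frozen builds), while sys.exit() is the documented API and is what pylint's consider-using-sys-exit (R1722) expects. A minimal, hypothetical entry point in the same shape:

    import sys


    def main(args):
        # ... do the work, return a process exit code ...
        return 0


    if __name__ == '__main__':
        sys.exit(main(sys.argv))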
@@ -115,7 +115,7 @@ FORCE_CLEAN = None
 SAFE_MODE = None
 NOEXTRACTFAILED = None
 NZB_CLIENT_AGENT = None
-SABNZBD_HOST = None
+SABNZBD_HOST = ''
 SABNZBD_PORT = None
 SABNZBD_APIKEY = None
 NZB_DEFAULT_DIRECTORY = None

@@ -256,7 +256,7 @@ def configure_locale():
     except (locale.Error, OSError):
         pass
     # For OSes that are poorly configured I'll just randomly force UTF-8
-    if not SYS_ENCODING or SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
+    if not SYS_ENCODING or SYS_ENCODING in {'ANSI_X3.4-1968', 'US-ASCII', 'ASCII'}:
         SYS_ENCODING = 'UTF-8'


@@ -275,7 +275,7 @@ def configure_migration():
         CFG = Config.addnzbget()
     else:  # load newly migrated config
         log.info(f'Loading config from [{CONFIG_FILE}]')
-        CFG = Config()
+        CFG = Config(None)


 def configure_logging_part_2():

@@ -605,7 +605,7 @@ def configure_transcoder():
         ABITRATE3 = transcode_defaults[DEFAULTS]['ABITRATE3']
         SCODEC = transcode_defaults[DEFAULTS]['SCODEC']
     transcode_defaults = {}  # clear memory
-    if transcode_defaults in ['mp4-scene-release'] and not OUTPUTQUALITYPERCENT:
+    if transcode_defaults in {'mp4-scene-release'} and not OUTPUTQUALITYPERCENT:
         OUTPUTQUALITYPERCENT = 100
     if VEXTENSION in allow_subs:
         ALLOWSUBS = 1

@@ -643,7 +643,7 @@ def configure_passwords_file():
 def configure_sections(section):
     global SECTIONS
     global CATEGORIES
-    # check for script-defied section and if None set to allow sections
+    # check for script-defied SECTION and if None set to allow sections
     SECTIONS = CFG[tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled()) if not section else (section,)]
     for section, subsections in SECTIONS.items():
         CATEGORIES.extend([subsection for subsection in subsections if CFG[section][subsection].isenabled()])

@@ -662,11 +662,11 @@ def configure_utility_locations():
         FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe.exe')
         SEVENZIP = os.path.join(APP_ROOT, 'nzb2media', 'extractor', 'bin', platform.machine(), '7z.exe')
         SHOWEXTRACT = int(str(CFG['Windows']['show_extraction']), 0)
-        if not (os.path.isfile(FFMPEG)):  # problem
+        if not os.path.isfile(FFMPEG):  # problem
             FFMPEG = None
             log.warning('Failed to locate ffmpeg.exe. Transcoding disabled!')
             log.warning('Install ffmpeg with x264 support to enable this feature ...')
-        if not (os.path.isfile(FFPROBE)):
+        if not os.path.isfile(FFPROBE):
             FFPROBE = None
             if CHECK_MEDIA:
                 log.warning('Failed to locate ffprobe.exe. Video corruption detection disabled!')
@@ -50,7 +50,7 @@ def command_complete(url, params, headers, section):
     return None


-def completed_download_handling(url2, headers, section='MAIN'):
+def completed_download_handling(url2, headers):
     try:
         response = requests.get(url2, params={}, headers=headers, stream=True, verify=False, timeout=(30, 60))
     except requests.ConnectionError:
@@ -267,7 +267,8 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
                 remove_dir(dir_name)
         if not release_id and not media_id:
             log.error(f'Could not find a downloaded movie in the database matching {input_name}, exiting!')
-            return ProcessResult(message='{0}: Failed to post-process - Failed download not found in {0}'.format(section), status_code=1)
+            msg = f'{section}: Failed to post-process - Failed download not found in {section}'
+            return ProcessResult(message=msg, status_code=1)
         if release_id:
             log.debug(f'Setting failed release {input_name} to ignored ...')
             url = f'{base_url}release.ignore'

@@ -277,7 +278,8 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
                 response = requests.get(url, params=params, verify=False, timeout=(30, 120))
             except requests.ConnectionError:
                 log.error(f'Unable to open URL {url}')
-                return ProcessResult(message='{0}: Failed to post-process - Unable to connect to {0}'.format(section), status_code=1)
+                msg = f'{section}: Failed to post-process - Unable to connect to {section}'
+                return ProcessResult(message=msg, status_code=1)
             result = response.json()
             if response.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
                 log.error(f'Server returned status {response.status_code}')

@@ -338,12 +340,12 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
         command_status = command_complete(url, params, headers, section)
         if command_status:
             log.debug(f'The Scan command return status: {command_status}')
-            if command_status in ['completed']:
+            if command_status in {'completed'}:
                 log.debug('The Scan command has completed successfully. Renaming was successful.')
                 return ProcessResult.success(f'{section}: Successfully post-processed {input_name}')
-            if command_status in ['failed']:
+            if command_status in {'failed'}:
                 log.debug('The Scan command has failed. Renaming was not successful.')
-                # return ProcessResult(message='{0}: Failed to post-process {1}'.format(section, input_name), status_code=1)
+                # return ProcessResult(message='{0}: Failed to post-process {1}'.format(SECTION, input_name), status_code=1)
         if not os.path.isdir(dir_name):
             log.debug(f'SUCCESS: Input Directory [{dir_name}] has been processed and removed')
             return ProcessResult.success(f'{section}: Successfully post-processed {input_name}')

@@ -353,7 +355,7 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
         # pause and let CouchPotatoServer/Radarr catch its breath
         time.sleep(10 * wait_for)
     # The status hasn't changed. we have waited wait_for minutes which is more than enough. uTorrent can resume seeding now.
-    if section == 'Radarr' and completed_download_handling(url2, headers, section=section):
+    if section == 'Radarr' and completed_download_handling(url2, headers):
         log.debug(f'The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {section}.')
         return ProcessResult.success(f'{section}: Complete DownLoad Handling is enabled. Passing back to {section}')
     log.warning(f'{input_name} does not appear to have changed status after {wait_for} minutes, Please check your logs.')
@@ -67,16 +67,16 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
             extract_files(dir_name)
     input_name, dir_name = convert_to_ascii(input_name, dir_name)
     # if listMediaFiles(dir_name, media=False, audio=True, meta=False, archives=False) and status:
-    #     logger.info('Status shown as failed from Downloader, but valid video files found. Setting as successful.', section)
+    #     logger.info('Status shown as failed from Downloader, but valid video files found. Setting as successful.', SECTION)
     #     status = 0
     if status == 0 and section == 'HeadPhones':
         params = {'apikey': apikey, 'cmd': 'forceProcess', 'dir': remote_dir(dir_name) if remote_path else dir_name}
         res = force_process(params, url, apikey, input_name, dir_name, section, wait_for)
-        if res.status_code in [0, 1]:
+        if res.status_code in {0, 1}:
            return res
         params = {'apikey': apikey, 'cmd': 'forceProcess', 'dir': os.path.split(remote_dir(dir_name))[0] if remote_path else os.path.split(dir_name)[0]}
         res = force_process(params, url, apikey, input_name, dir_name, section, wait_for)
-        if res.status_code in [0, 1]:
+        if res.status_code in {0, 1}:
            return res
         # The status hasn't changed. uTorrent can resume seeding now.
         log.warning(f'The music album does not appear to have changed status after {wait_for} minutes. Please check your Logs')

@@ -110,7 +110,7 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
         while num < 6:  # set up wait_for minutes to see if command completes..
             time.sleep(10 * wait_for)
             command_status = command_complete(url, params, headers, section)
-            if command_status and command_status in ['completed', 'failed']:
+            if command_status and command_status in {'completed', 'failed'}:
                 break
             num += 1
         if command_status:

@@ -118,12 +118,12 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
         if not os.path.exists(dir_name):
             log.debug(f'The directory {dir_name} has been removed. Renaming was successful.')
             return ProcessResult.success(f'{section}: Successfully post-processed {input_name}')
-        if command_status and command_status in ['completed']:
+        if command_status and command_status in {'completed'}:
             log.debug('The Scan command has completed successfully. Renaming was successful.')
             return ProcessResult.success(f'{section}: Successfully post-processed {input_name}')
-        if command_status and command_status in ['failed']:
+        if command_status and command_status in {'failed'}:
             log.debug('The Scan command has failed. Renaming was not successful.')
-            # return ProcessResult.failure(f'{section}: Failed to post-process {input_name}')
+            # return ProcessResult.failure(f'{SECTION}: Failed to post-process {input_name}')
         else:
             log.debug(f'The Scan command did not return status completed. Passing back to {section} to attempt complete download handling.')
             return ProcessResult(message=f'{section}: Passing back to {section} to attempt Complete Download Handling', status_code=status)
@@ -105,7 +105,7 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
             # Re-raise the error if it wasn't about the directory not existing
             if error.errno != errno.EEXIST:
                 raise
-    if 'process_method' not in fork_params or (client_agent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != 'Destination'):
+    if 'process_method' not in fork_params or (client_agent in {'nzbget', 'sabnzbd'} and nzb_extraction_by != 'Destination'):
         if input_name:
             process_all_exceptions(input_name, dir_name)
             input_name, dir_name = convert_to_ascii(input_name, dir_name)

@@ -185,8 +185,7 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
         fork_params['nzbName'] = input_name
     for param in copy.copy(fork_params):
         if param == 'failed':
-            if status > 1:
-                status = 1
+            status = min(status, 1)
             fork_params[param] = status
             if 'proc_type' in fork_params:
                 del fork_params['proc_type']

@@ -201,7 +200,7 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
                 fork_params[param] = 'manual'
                 if 'proc_type' in fork_params:
                     del fork_params['proc_type']
-        if param in ['dir_name', 'dir', 'proc_dir', 'process_directory', 'path']:
+        if param in {'dir_name', 'dir', 'proc_dir', 'process_directory', 'path'}:
             fork_params[param] = dir_name
             if remote_path:
                 fork_params[param] = remote_dir(dir_name)

@@ -210,12 +209,12 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
                 fork_params[param] = process_method
             else:
                 del fork_params[param]
-        if param in ['force', 'force_replace']:
+        if param in {'force', 'force_replace'}:
             if force:
                 fork_params[param] = force
             else:
                 del fork_params[param]
-        if param in ['delete_on', 'delete']:
+        if param in {'delete_on', 'delete'}:
             if delete_on:
                 fork_params[param] = delete_on
             else:

@@ -228,7 +227,9 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
         if param == 'force_next':
             fork_params[param] = 1
     # delete any unused params so we don't pass them to SB by mistake
-    [fork_params.pop(k) for k, v in list(fork_params.items()) if v is None]
+    for key, val in list(fork_params.items()):
+        if val is None:
+            del fork_params[key]
     if status == 0:
         if section == 'NzbDrone' and not apikey:
            log.info('No Sonarr apikey entered. Processing completed.')
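The last hunk above replaces a list comprehension that was evaluated only for its side effects. A bare comprehension statement builds a throwaway list and is typically reported by pylint as expression-not-assigned (W0106); the explicit loop keeps the intent obvious. A standalone sketch with made-up data:

    fork_params = {'quiet': 1, 'proc_type': None, 'delete_on': None}

    # Previously: [fork_params.pop(k) for k, v in list(fork_params.items()) if v is None]
    # Now: drop unused params without building a list nobody reads.
    for key, val in list(fork_params.items()):
        if val is None:
            del fork_params[key]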
@@ -291,7 +292,7 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
         login = f'{web_root}/login'
         login_params = {'username': username, 'password': password}
         response = session.get(login, verify=False, timeout=(30, 60))
-        if response.status_code in [401, 403] and response.cookies.get('_xsrf'):
+        if response.status_code in {401, 403} and response.cookies.get('_xsrf'):
             login_params['_xsrf'] = response.cookies.get('_xsrf')
         session.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60))
         response = session.get(url, auth=(username, password), params=fork_params, stream=True, verify=False, timeout=(30, 1800))

@@ -362,7 +363,7 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
         while num < 6:  # set up wait_for minutes to see if command completes..
             time.sleep(10 * wait_for)
             command_status = command_complete(url, params, headers, section)
-            if command_status and command_status in ['completed', 'failed']:
+            if command_status and command_status in {'completed', 'failed'}:
                 break
             num += 1
         if command_status:

@@ -370,14 +371,14 @@ def process(*, section: str, dir_name: str, input_name: str = '', status: int =
         if not os.path.exists(dir_name):
             log.debug(f'The directory {dir_name} has been removed. Renaming was successful.')
             return ProcessResult.success(f'{section}: Successfully post-processed {input_name}')
-        if command_status and command_status in ['completed']:
+        if command_status and command_status in {'completed'}:
             log.debug('The Scan command has completed successfully. Renaming was successful.')
             return ProcessResult.success(f'{section}: Successfully post-processed {input_name}')
-        if command_status and command_status in ['failed']:
+        if command_status and command_status in {'failed'}:
             log.debug('The Scan command has failed. Renaming was not successful.')
-            # return ProcessResult.failure(f'{section}: Failed to post-process {input_name}')
+            # return ProcessResult.failure(f'{SECTION}: Failed to post-process {input_name}')
         url2 = nzb2media.utils.common.create_url(scheme, host, port, route2)
-        if completed_download_handling(url2, headers, section=section):
+        if completed_download_handling(url2, headers):
             log.debug(f'The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {section}.')
             return ProcessResult(message=f'{section}: Complete DownLoad Handling is enabled. Passing back to {section}', status_code=status)
         log.warning('The Scan command did not return a valid status. Renaming was not successful.')
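Many hunks in the next file replace `for index in range(len(env_keys))` loops with enumerate while still indexing the parallel cfg_keys list by position. A standalone sketch of the idiom with made-up data:

    env_keys = ['ENABLED', 'APIKEY']
    cfg_keys = ['enabled', 'apikey']

    # enumerate yields the index and the element together, so the loop body no
    # longer re-indexes env_keys, while cfg_keys can still be addressed by position.
    for index, env_key in enumerate(env_keys):
        key = f'NZBPO_{env_key}'
        option = cfg_keys[index]
        print(key, option)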
@@ -39,6 +39,7 @@ class Section(configobj.Section):
             for key in [k for (k, v) in to_return.items() if not v]:
                 del to_return[key]
             return to_return
+        return None

     def findsection(self, key):
         to_return = copy.deepcopy(self)

@@ -82,7 +83,7 @@ class Section(configobj.Section):


 class ConfigObj(configobj.ConfigObj, Section):
-    def __init__(self, infile=None, *args, **kw):
+    def __init__(self, infile, *args, **kw):
         if infile is None:
             infile = nzb2media.CONFIG_FILE
         super().__init__(os.fspath(infile), *args, **kw)

@@ -130,7 +131,7 @@ class ConfigObj(configobj.ConfigObj, Section):
             if CFG_OLD[section].sections:
                 subsections.update({section: CFG_OLD[section].sections})
             for option, value in CFG_OLD[section].items():
-                if option in ['category', 'cpsCategory', 'sbCategory', 'srCategory', 'hpCategory', 'mlCategory', 'gzCategory', 'raCategory', 'ndCategory', 'W3Category']:
+                if option in {'category', 'cpsCategory', 'sbCategory', 'srCategory', 'hpCategory', 'mlCategory', 'gzCategory', 'raCategory', 'ndCategory', 'W3Category'}:
                     if not isinstance(value, list):
                         value = [value]
                     # add subsection

@@ -140,31 +141,31 @@ class ConfigObj(configobj.ConfigObj, Section):

         def cleanup_values(values, section):
             for option, value in values.items():
-                if section in ['CouchPotato']:
+                if section in {'CouchPotato'}:
                     if option == ['outputDirectory']:
                         CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0]
                         values.pop(option)
-                if section in ['CouchPotato', 'HeadPhones', 'Gamez', 'Mylar']:
-                    if option in ['username', 'password']:
+                if section in {'CouchPotato', 'HeadPhones', 'Gamez', 'Mylar'}:
+                    if option in {'username', 'password'}:
                         values.pop(option)
-                if section in ['Mylar']:
+                if section in {'Mylar'}:
                     if option == 'wait_for':  # remove old format
                         values.pop(option)
-                if section in ['SickBeard', 'NzbDrone']:
+                if section in {'SickBeard', 'NzbDrone'}:
                     if option == 'failed_fork':  # change this old format
                         values['failed'] = 'auto'
                         values.pop(option)
                     if option == 'outputDirectory':  # move this to new location format
                         CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0]
                         values.pop(option)
-                if section in ['Torrent']:
-                    if option in ['compressedExtensions', 'mediaExtensions', 'metaExtensions', 'minSampleSize']:
+                if section in {'Torrent'}:
+                    if option in {'compressedExtensions', 'mediaExtensions', 'metaExtensions', 'minSampleSize'}:
                         CFG_NEW['Extensions'][option] = value
                         values.pop(option)
                     if option == 'useLink':  # Sym links supported now as well.
-                        if value in ['1', 1]:
+                        if value in {'1', 1}:
                             value = 'hard'
-                        elif value in ['0', 0]:
+                        elif value in {'0', 0}:
                             value = 'no'
                         values[option] = value
                     if option == 'forceClean':

@@ -173,8 +174,8 @@ class ConfigObj(configobj.ConfigObj, Section):
                     if option == 'qBittorrenHost':  # We had a typo that is now fixed.
                         CFG_NEW['Torrent']['qBittorrentHost'] = value
                         values.pop(option)
-                if section in ['Transcoder']:
-                    if option in ['niceness']:
+                if section in {'Transcoder'}:
+                    if option in {'niceness'}:
                         CFG_NEW['Posix'][option] = value
                         values.pop(option)
                 if option == 'remote_path':

@@ -225,7 +226,7 @@ class ConfigObj(configobj.ConfigObj, Section):
                     process_section(section, subsection)
                 elif section in CFG_OLD.keys():
                     process_section(section, subsection)
-        # migrate SiCRKAGE settings from SickBeard section to new dedicated SiCRKAGE section
+        # migrate SiCRKAGE settings from SickBeard SECTION to new dedicated SiCRKAGE SECTION
        if CFG_OLD['SickBeard']['tv']['enabled'] and CFG_OLD['SickBeard']['tv']['fork'] == 'sickrage-api':
             for option, value in CFG_OLD['SickBeard']['tv'].items():
                 if option in CFG_NEW['SiCKRAGE']['tv']:

@@ -233,7 +234,7 @@ class ConfigObj(configobj.ConfigObj, Section):
             # set API version to 1 if API key detected and no SSO username is set
             if CFG_NEW['SiCKRAGE']['tv']['apikey'] and not CFG_NEW['SiCKRAGE']['tv']['sso_username']:
                 CFG_NEW['SiCKRAGE']['tv']['api_version'] = 1
-            # disable SickBeard section
+            # disable SickBeard SECTION
             CFG_NEW['SickBeard']['tv']['enabled'] = 0
             CFG_NEW['SickBeard']['tv']['fork'] = 'auto'
         # create a backup of our old config

@@ -247,23 +248,33 @@ class ConfigObj(configobj.ConfigObj, Section):
     @staticmethod
     def addnzbget():
         # load configs into memory
-        cfg_new = Config()
+        cfg_new = Config(None)
         try:
             if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ:
                 if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']:
-                    log.warning('{x} category is set for SickBeard and Sonarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_NDCATEGORY']))
+                    _cat = os.environ['NZBPO_NDCATEGORY']
+                    msg = f'{_cat} category is set for SickBeard and Sonarr. Please check your config in NZBGet'
+                    log.warning(msg)
             if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ:
                 if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_CPSCATEGORY']:
-                    log.warning('{x} category is set for CouchPotato and Radarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_RACATEGORY']))
+                    _cat = os.environ['NZBPO_RACATEGORY']
+                    msg = f'{_cat} category is set for CouchPotato and Radarr. Please check your config in NZBGet'
+                    log.warning(msg)
             if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_W3CATEGORY' in os.environ:
                 if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_W3CATEGORY']:
-                    log.warning('{x} category is set for Watcher3 and Radarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_RACATEGORY']))
+                    _cat = os.environ['NZBPO_RACATEGORY']
+                    msg = f'{_cat} category is set for Watcher3 and Radarr. Please check your config in NZBGet'
+                    log.warning(msg)
             if 'NZBPO_W3CATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ:
                 if os.environ['NZBPO_W3CATEGORY'] == os.environ['NZBPO_CPSCATEGORY']:
-                    log.warning('{x} category is set for CouchPotato and Watcher3. Please check your config in NZBGet'.format(x=os.environ['NZBPO_W3CATEGORY']))
+                    _cat = os.environ['NZBPO_W3CATEGORY']
+                    msg = f'{_cat} category is set for CouchPotato and Watcher3. Please check your config in NZBGet'
+                    log.warning(msg)
             if 'NZBPO_LICATEGORY' in os.environ and 'NZBPO_HPCATEGORY' in os.environ:
                 if os.environ['NZBPO_LICATEGORY'] == os.environ['NZBPO_HPCATEGORY']:
-                    log.warning('{x} category is set for HeadPhones and Lidarr. Please check your config in NZBGet'.format(x=os.environ['NZBPO_LICATEGORY']))
+                    _cat = os.environ['NZBPO_LICATEGORY']
+                    msg = f'{_cat} category is set for HeadPhones and Lidarr. Please check your config in NZBGet'
+                    log.warning(msg)
             section = 'Nzb'
             key = 'NZBOP_DESTDIR'
             if key in os.environ:

@@ -273,8 +284,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             section = 'General'
             env_keys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'REQUIRE_LAN', 'SAFE_MODE', 'NO_EXTRACT_FAILED']
             cfg_keys = ['auto_update', 'check_media', 'require_lan', 'safe_mode', 'no_extract_failed']
-            for index in range(len(env_keys)):
-                key = f'NZBPO_{env_keys[index]}'
+            for index, env_key in enumerate(env_keys):
+                key = f'NZBPO_{env_key}'
                 if key in os.environ:
                     option = cfg_keys[index]
                     value = os.environ[key]

@@ -282,8 +293,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             section = 'Network'
             env_keys = ['MOUNTPOINTS']
             cfg_keys = ['mount_points']
-            for index in range(len(env_keys)):
-                key = f'NZBPO_{env_keys[index]}'
+            for index, env_key in enumerate(env_keys):
+                key = f'NZBPO_{env_key}'
                 if key in os.environ:
                     option = cfg_keys[index]
                     value = os.environ[key]

@@ -293,8 +304,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH', 'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY']
             cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path', 'wait_for', 'watch_dir', 'omdbapikey']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_CPS{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_CPS{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -311,8 +322,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH', 'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY']
             cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path', 'wait_for', 'watch_dir', 'omdbapikey']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_W3{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_W3{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -329,8 +340,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
             cfg_keys = ['enabled', 'host', 'port', 'apikey', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_SB{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_SB{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -347,8 +358,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             env_keys = ['ENABLED', 'HOST', 'PORT', 'APIKEY', 'API_VERSION', 'SSO_USERNAME', 'SSO_PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
             cfg_keys = ['enabled', 'host', 'port', 'apikey', 'api_version', 'sso_username', 'sso_password', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_SR{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_SR{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -365,8 +376,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH', 'DELETE_FAILED']
             cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path', 'delete_failed']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_HP{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_HP{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -381,8 +392,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             env_keys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'REMOTE_PATH']
             cfg_keys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir', 'remote_path']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_MY{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_MY{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -395,8 +406,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
             cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_GZ{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_GZ{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -409,8 +420,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             env_keys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'REMOTE_PATH']
             cfg_keys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'remote_path']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_LL{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_LL{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -424,8 +435,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             # new cfgKey added for importMode
             cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'importMode']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_ND{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_ND{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -443,8 +454,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             # new cfgKey added for importMode
             cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'omdbapikey', 'importMode']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_RA{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_RA{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -461,8 +472,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             env_keys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
             cfg_keys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
             if env_cat_key in os.environ:
-                for index in range(len(env_keys)):
-                    key = f'NZBPO_LI{env_keys[index]}'
+                for index, env_key in enumerate(env_keys):
+                    key = f'NZBPO_LI{env_key}'
                     if key in os.environ:
                         option = cfg_keys[index]
                         value = os.environ[key]

@@ -475,8 +486,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             section = 'Extensions'
             env_keys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
             cfg_keys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
-            for index in range(len(env_keys)):
-                key = f'NZBPO_{env_keys[index]}'
+            for index, env_key in enumerate(env_keys):
+                key = f'NZBPO_{env_key}'
                 if key in os.environ:
                     option = cfg_keys[index]
                     value = os.environ[key]

@@ -484,8 +495,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             section = 'Posix'
             env_keys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA']
             cfg_keys = ['niceness', 'ionice_class', 'ionice_classdata']
-            for index in range(len(env_keys)):
-                key = f'NZBPO_{env_keys[index]}'
+            for index, env_key in enumerate(env_keys):
+                key = f'NZBPO_{env_key}'
                 if key in os.environ:
                     option = cfg_keys[index]
                     value = os.environ[key]

@@ -493,8 +504,8 @@ class ConfigObj(configobj.ConfigObj, Section):
             section = 'Transcoder'
             env_keys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH', 'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES', 'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR', 'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW', 'OUTPUTVIDEOPRESET', 'OUTPUTVIDEOFRAMERATE', 'OUTPUTVIDEOBITRATE', 'OUTPUTAUDIOCODEC', 'AUDIOCODECALLOW', 'OUTPUTAUDIOBITRATE', 'OUTPUTQUALITYPERCENT', 'GETSUBS', 'OUTPUTAUDIOTRACK2CODEC', 'AUDIOCODEC2ALLOW', 'OUTPUTAUDIOTRACK2BITRATE', 'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE', 'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS', 'OUTPUTAUDIOOTHERCHANNELS', 'OUTPUTVIDEORESOLUTION']
             cfg_keys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath', 'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages', 'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir', 'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow', 'outputVideoPreset', 'outputVideoFramerate', 'outputVideoBitrate', 'outputAudioCodec', 'AudioCodecAllow', 'outputAudioBitrate', 'outputQualityPercent', 'getSubs', 'outputAudioTrack2Codec', 'AudioCodec2Allow', 'outputAudioTrack2Bitrate', 'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate', 'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels', 'outputAudioOtherChannels', 'outputVideoResolution']
|
||||
for index in range(len(env_keys)):
|
||||
key = f'NZBPO_{env_keys[index]}'
|
||||
for index, env_key in enumerate(env_keys):
|
||||
key = f'NZBPO_{env_key}'
|
||||
if key in os.environ:
|
||||
option = cfg_keys[index]
|
||||
value = os.environ[key]
|
||||
|
@ -502,8 +513,8 @@ class ConfigObj(configobj.ConfigObj, Section):
|
|||
section = 'WakeOnLan'
|
||||
env_keys = ['WAKE', 'HOST', 'PORT', 'MAC']
|
||||
cfg_keys = ['wake', 'host', 'port', 'mac']
|
||||
for index in range(len(env_keys)):
|
||||
key = f'NZBPO_WOL{env_keys[index]}'
|
||||
for index, env_key in enumerate(env_keys):
|
||||
key = f'NZBPO_WOL{env_key}'
|
||||
if key in os.environ:
|
||||
option = cfg_keys[index]
|
||||
value = os.environ[key]
|
||||
|
@ -513,8 +524,8 @@ class ConfigObj(configobj.ConfigObj, Section):
|
|||
env_keys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE', 'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH']
|
||||
cfg_keys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce', 'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path']
|
||||
if env_cat_key in os.environ:
|
||||
for index in range(len(env_keys)):
|
||||
key = f'NZBPO_{env_keys[index]}'
|
||||
for index, env_key in enumerate(env_keys):
|
||||
key = f'NZBPO_{env_key}'
|
||||
if key in os.environ:
|
||||
option = cfg_keys[index]
|
||||
value = os.environ[key]
|
||||
|
|
|
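All of the hunks above apply the same pylint-driven rewrite: for index in range(len(env_keys)) becomes for index, env_key in enumerate(env_keys), so the NZBPO_* environment key is built from the loop variable instead of re-indexing the list. A minimal standalone sketch of that pattern (not part of the commit; the dict and the 'NZBPO_HP' prefix are illustrative stand-ins for os.environ and the real option names):

    # Sketch of the enumerate() refactor used throughout the config mapping.
    def map_env_to_cfg(environ, prefix='NZBPO_HP'):
        env_keys = ['ENABLED', 'HOST', 'PORT']
        cfg_keys = ['enabled', 'host', 'port']
        cfg = {}
        # enumerate() yields the position and the element together,
        # so env_keys[index] never has to be re-read inside the loop.
        for index, env_key in enumerate(env_keys):
            key = f'{prefix}{env_key}'
            if key in environ:
                option = cfg_keys[index]
                cfg[option] = environ[key]
        return cfg

    print(map_env_to_cfg({'NZBPO_HPHOST': 'localhost', 'NZBPO_HPPORT': '8181'}))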
@@ -61,15 +61,15 @@ def extract(file_path, output_destination):
log.warning('EXTRACTOR: No archive extracting programs found, plugin will be disabled')
ext = os.path.splitext(file_path)
cmd = []
if ext[1] in ('.gz', '.bz2', '.lzma'):
if ext[1] in {'.gz', '.bz2', '.lzma'}:
# Check if this is a tar
if os.path.splitext(ext[0])[1] == '.tar':
cmd = extract_commands[f'.tar{ext[1]}']
else: # Try gunzip
cmd = extract_commands[ext[1]]
elif ext[1] in ('.1', '.01', '.001') and os.path.splitext(ext[0])[1] in ('.rar', '.zip', '.7z'):
elif ext[1] in {'.1', '.01', '.001'} and os.path.splitext(ext[0])[1] in {'.rar', '.zip', '.7z'}:
cmd = extract_commands[os.path.splitext(ext[0])[1]]
elif ext[1] in ('.cb7', '.cba', '.cbr', '.cbt', '.cbz'):
elif ext[1] in {'.cb7', '.cba', '.cbr', '.cbt', '.cbz'}:
# don't extract these comic book archives.
return False
else:
@@ -81,7 +81,7 @@ def extract(file_path, output_destination):
# Create outputDestination folder
nzb2media.make_dir(output_destination)
if nzb2media.PASSWORDS_FILE and os.path.isfile(os.path.normpath(nzb2media.PASSWORDS_FILE)):
with open(os.path.normpath(nzb2media.PASSWORDS_FILE)) as fin:
with open(os.path.normpath(nzb2media.PASSWORDS_FILE), encoding='utf-8') as fin:
passwords = [line.strip() for line in fin]
else:
passwords = []
@@ -128,8 +128,6 @@ def extract(file_path, output_destination):
log.info(f'EXTRACTOR: Extraction was successful for {file_path} to {output_destination} using password: {password}')
success = 1
break
else:
continue
except Exception:
log.error(f'EXTRACTOR: Extraction failed for {file_path}. Could not call command {cmd}')
os.chdir(pwd)
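The extractor hunks swap tuple membership tests for set literals and add encoding='utf-8' to the passwords-file read. A small sketch of the extension dispatch with set membership (the command table here is a made-up stand-in for extract_commands):

    import os

    # Illustrative command table only; the real one maps many more extensions.
    commands = {'.tar.gz': ['tar', 'xzf'], '.gz': ['gunzip'], '.rar': ['unrar', 'x']}

    def pick_command(file_path):
        ext = os.path.splitext(file_path)
        if ext[1] in {'.gz', '.bz2', '.lzma'}:
            # A double extension like .tar.gz is treated as a tarball.
            if os.path.splitext(ext[0])[1] == '.tar':
                return commands.get(f'.tar{ext[1]}')
            return commands.get(ext[1])
        if ext[1] in {'.cb7', '.cba', '.cbr', '.cbt', '.cbz'}:
            return None  # comic book archives are deliberately skipped
        return commands.get(ext[1])

    print(pick_command('show.s01.tar.gz'))  # ['tar', 'xzf']
    print(pick_command('comic.cbz'))        # None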
@@ -11,9 +11,11 @@ class GitHub:
self.github_repo = github_repo
self.branch = branch

def _access_api(self, path, params=None):
@staticmethod
def _access_api(path, params=None):
"""Access API at given an API path and optional parameters."""
url = 'https://api.github.com/{path}'.format(path='/'.join(path))
route = '/'.join(path)
url = f'https://api.github.com/{route}'
data = requests.get(url, params=params, verify=False)
return data.json() if data.ok else []
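The GitHub helper becomes a @staticmethod (it never touches self) and builds its URL with an f-string. A hedged sketch of the same shape; the path segments in the example call are placeholders and the call needs network access:

    import requests

    class GitHub:
        @staticmethod
        def _access_api(path, params=None):
            # Join the path segments into one API route and fetch it.
            route = '/'.join(path)
            url = f'https://api.github.com/{route}'
            data = requests.get(url, params=params, timeout=30)
            return data.json() if data.ok else []

    # Example (requires network access):
    # GitHub._access_api(['repos', 'clinton-hall', 'nzbToMedia', 'commits'], params={'per_page': 1})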
@@ -25,7 +25,7 @@ def db_filename(filename='nzbtomedia.db', suffix=None):


class DBConnection:
def __init__(self, filename='nzbtomedia.db', suffix=None, row_type=None):
def __init__(self, filename='nzbtomedia.db'):
self.filename = filename
self.connection = sqlite3.connect(db_filename(filename), 20)
self.connection.row_factory = sqlite3.Row
@@ -151,9 +151,14 @@ class DBConnection:

changes_before = self.connection.total_changes
items = list(value_dict.values()) + list(key_dict.values())
self.action('UPDATE {table} SET {params} WHERE {conditions}'.format(table=table_name, params=', '.join(gen_params(value_dict)), conditions=' AND '.join(gen_params(key_dict))), items)
_params = ', '.join(gen_params(value_dict))
_conditions = ' AND '.join(gen_params(key_dict))
self.action(f'UPDATE {table_name} SET {_params} WHERE {_conditions}', items)
if self.connection.total_changes == changes_before:
self.action('INSERT OR IGNORE INTO {table} ({columns}) VALUES ({values})'.format(table=table_name, columns=', '.join(map(str, value_dict.keys())), values=', '.join(['?'] * len(value_dict.values()))), list(value_dict.values()))
_cols = ', '.join(map(str, value_dict.keys()))
values = list(value_dict.values())
_vals = ', '.join(['?'] * len(values))
self.action(f'INSERT OR IGNORE INTO {table_name} ({_cols}) VALUES ({_vals})', values)

def table_info(self, table_name):
# FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
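The upsert keeps its strategy - try an UPDATE, and if connection.total_changes did not move, fall back to INSERT OR IGNORE - while building the SQL through named f-string parts. A self-contained sketch against an in-memory SQLite database (the table and columns are examples, not the project schema):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE downloads (input_directory TEXT PRIMARY KEY, status INTEGER)')

    def gen_params(param_dict):
        return [f'{key} = ?' for key in param_dict]

    def upsert(table_name, value_dict, key_dict):
        changes_before = conn.total_changes
        items = list(value_dict.values()) + list(key_dict.values())
        _params = ', '.join(gen_params(value_dict))
        _conditions = ' AND '.join(gen_params(key_dict))
        conn.execute(f'UPDATE {table_name} SET {_params} WHERE {_conditions}', items)
        if conn.total_changes == changes_before:
            # Nothing was updated, so the row does not exist yet.
            _cols = ', '.join(map(str, value_dict.keys()))
            values = list(value_dict.values())
            _vals = ', '.join(['?'] * len(values))
            conn.execute(f'INSERT OR IGNORE INTO {table_name} ({_cols}) VALUES ({_vals})', values)

    upsert('downloads', {'input_directory': '/tmp/x', 'status': 0}, {'input_directory': '/tmp/x'})
    upsert('downloads', {'input_directory': '/tmp/x', 'status': 1}, {'input_directory': '/tmp/x'})
    print(conn.execute('SELECT * FROM downloads').fetchall())  # [('/tmp/x', 1)]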
@@ -58,7 +58,7 @@ class PyMedusaApiV2(SickBeard):
# Check for an apikey
# This is required with using fork = medusa-apiv2
if not sb_init.apikey:
log.error('For the section SickBeard `fork = medusa-apiv2` you also ' 'need to configure an `apikey`')
log.error('For the SECTION SickBeard `fork = medusa-apiv2` you also ' 'need to configure an `apikey`')
raise ValueError('Missing apikey for fork: medusa-apiv2')

@property
@@ -49,7 +49,7 @@ class InitSickBeard:
self.protocol = 'https://' if self.ssl else 'http://'

def auto_fork(self):
# auto-detect correct section
# auto-detect correct SECTION
# config settings
if nzb2media.FORK_SET:
# keep using determined fork for multiple (manual) post-processing
@@ -159,7 +159,7 @@ class InitSickBeard:
login = nzb2media.utils.common.create_url(scheme=self.protocol, host=self.host, port=self.port, path=f'{self.web_root}/login')
login_params = {'username': self.username, 'password': self.password}
response = session.get(login, verify=False, timeout=(30, 60))
if response.status_code in [401, 403] and response.cookies.get('_xsrf'):
if response.status_code in {401, 403} and response.cookies.get('_xsrf'):
login_params['_xsrf'] = response.cookies.get('_xsrf')
session.post(login, data=login_params, stream=True, verify=False)
response = session.get(url, auth=(self.username, self.password), params=api_params, verify=False)
@@ -274,8 +274,7 @@ class SickBeard:
fork_params['nzbName'] = self.input_name
for param in copy.copy(fork_params):
if param == 'failed':
if self.failed > 1:
self.failed = 1
self.failed = min(self.failed, 1)
fork_params[param] = self.failed
if 'proc_type' in fork_params:
del fork_params['proc_type']
@@ -291,7 +290,7 @@ class SickBeard:
fork_params[param] = 'manual'
if 'proc_type' in fork_params:
del fork_params['proc_type']
if param in ['dir_name', 'dir', 'proc_dir', 'process_directory', 'path']:
if param in {'dir_name', 'dir', 'proc_dir', 'process_directory', 'path'}:
fork_params[param] = self.dir_name
if self.remote_path:
fork_params[param] = remote_dir(self.dir_name)
@@ -303,12 +302,12 @@ class SickBeard:
fork_params[param] = self.process_method
else:
del fork_params[param]
if param in ['force', 'force_replace']:
if param in {'force', 'force_replace'}:
if self.force:
fork_params[param] = self.force
else:
del fork_params[param]
if param in ['delete_on', 'delete']:
if param in {'delete_on', 'delete'}:
if self.delete_on:
fork_params[param] = self.delete_on
else:
@@ -326,7 +325,9 @@ class SickBeard:
if param == 'force_next':
fork_params[param] = 1
# delete any unused params so we don't pass them to SB by mistake
[fork_params.pop(k) for k, v in list(fork_params.items()) if v is None]
for key, value in list(fork_params.items()):
if value is None:
del fork_params[key]

def api_call(self) -> ProcessResult:
"""Perform a base sickbeard api call."""
@@ -339,7 +340,7 @@ class SickBeard:
login = nzb2media.utils.common.create_url(self.sb_init.protocol, self.sb_init.host, self.sb_init.port, route)
login_params = {'username': self.sb_init.username, 'password': self.sb_init.password}
response = self.session.get(login, verify=False, timeout=(30, 60))
if response.status_code in [401, 403] and response.cookies.get('_xsrf'):
if response.status_code in {401, 403} and response.cookies.get('_xsrf'):
login_params['_xsrf'] = response.cookies.get('_xsrf')
self.session.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60))
response = self.session.get(self.url, auth=(self.sb_init.username, self.sb_init.password), params=self.sb_init.fork_params, stream=True, verify=False, timeout=(30, 1800))
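The last two SickBeard hunks replace a manual clamp of self.failed with min() and replace a list comprehension used only for its pop() side effects with an explicit loop. Both fit in a short throwaway sketch (the dict contents are illustrative):

    # Illustrative dict; the real fork_params is built from the detected fork.
    fork_params = {'nzbName': 'show.s01e01', 'proc_dir': None, 'failed': 3, 'force': None}

    # Clamp the failed flag to at most 1 without an if statement.
    fork_params['failed'] = min(fork_params['failed'], 1)

    # Iterate over a snapshot so the dict can be mutated while looping.
    for key, value in list(fork_params.items()):
        if value is None:
            del fork_params[key]

    print(fork_params)  # {'nzbName': 'show.s01e01', 'failed': 1}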
@@ -26,7 +26,8 @@ def configure_plex(config):
def plex_update(category):
if nzb2media.FAILED:
return
url = '{scheme}://{host}:{port}/library/sections/'.format(scheme='https' if nzb2media.PLEX_SSL else 'http', host=nzb2media.PLEX_HOST, port=nzb2media.PLEX_PORT)
scheme = 'https' if nzb2media.PLEX_SSL else 'http'
url = f'{scheme}://{nzb2media.PLEX_HOST}:{nzb2media.PLEX_PORT}/library/sections/'
section = None
if not nzb2media.PLEX_SECTION:
return
@@ -39,4 +40,4 @@ def plex_update(category):
requests.get(url, timeout=(60, 120), verify=False)
log.debug('Plex Library has been refreshed.')
else:
log.debug('Could not identify section for plex update')
log.debug('Could not identify SECTION for plex update')
@@ -44,7 +44,7 @@ def process(input_directory, input_name=None, status=0, client_agent='manual', d
control_value_dict = {'input_directory': input_directory1}
new_value_dict = {'input_name': input_name1, 'input_hash': download_id, 'input_id': download_id, 'client_agent': client_agent, 'status': 0, 'last_update': datetime.date.today().toordinal()}
my_db.upsert('downloads', new_value_dict, control_value_dict)
# auto-detect section
# auto-detect SECTION
if input_category is None:
input_category = 'UNCAT'
usercat = input_category
@@ -19,8 +19,7 @@ def parse_download_id():
return os.environ[download_id_key]
except KeyError:
pass
else:
return ''
return ''


def parse_failure_link():
@@ -40,7 +39,7 @@ def _parse_total_status():
def _parse_par_status():
"""Parse nzbget par status from environment."""
par_status = os.environ['NZBPP_PARSTATUS']
if par_status == '1' or par_status == '4':
if par_status in {'1', '4'}:
log.warning('Par-repair failed, setting status \'failed\'')
return 1
return 0
@@ -84,7 +84,7 @@ def replace_filename(filename, dirname, name):
return newfile_path


def reverse_filename(filename, dirname, name):
def reverse_filename(filename, dirname):
head, file_extension = os.path.splitext(os.path.basename(filename))
na_parts = season_pattern.search(head)
if na_parts is not None:
@@ -118,7 +118,7 @@ def rename_script(dirname):
dirname = directory
break
if rename_file:
with open(rename_file) as fin:
with open(rename_file, encoding='utf-8') as fin:
rename_lines = [line.strip() for line in fin]
for line in rename_lines:
if re.search('^(mv|Move)', line, re.IGNORECASE):
@@ -20,5 +20,6 @@ def configure_client():
client = UTorrentClient(web_ui, user, password)
except Exception:
log.error('Failed to connect to uTorrent')
return None
else:
return client
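The uTorrent hunk adds an explicit return None on the failure path so that every branch of the try/except/else returns a value. Sketch of the shape, with a dummy class standing in for UTorrentClient:

    class DummyClient:
        # Stand-in for UTorrentClient; raises when no web UI is configured.
        def __init__(self, web_ui, user, password):
            if not web_ui:
                raise ValueError('no web ui configured')

    def configure_client(web_ui='http://localhost:8080/gui/', user='admin', password=''):
        try:
            client = DummyClient(web_ui, user, password)
        except Exception:
            print('Failed to connect to uTorrent')
            return None
        else:
            return client

    print(configure_client() is not None)       # True
    print(configure_client(web_ui='') is None)  # True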
@@ -26,7 +26,7 @@ __author__ = 'Justin'
def is_video_good(video: pathlib.Path, status, require_lan=None):
file_ext = video.suffix
disable = False
if file_ext not in nzb2media.MEDIA_CONTAINER or not nzb2media.FFPROBE or not nzb2media.CHECK_MEDIA or file_ext in ['.iso'] or (status > 0 and nzb2media.NOEXTRACTFAILED):
if file_ext not in nzb2media.MEDIA_CONTAINER or not nzb2media.FFPROBE or not nzb2media.CHECK_MEDIA or file_ext in {'.iso'} or (status > 0 and nzb2media.NOEXTRACTFAILED):
disable = True
else:
test_details, res = get_video_details(nzb2media.TEST_FILE)
@@ -205,7 +205,7 @@ def build_commands(file, new_dir, movie_name):
video_cmd.extend(['-level', str(nzb2media.VLEVEL)])
if nzb2media.ACODEC:
audio_cmd.extend(['-c:a', nzb2media.ACODEC])
if nzb2media.ACODEC in ['aac', 'dts']:
if nzb2media.ACODEC in {'aac', 'dts'}:
# Allow users to use the experimental AAC codec that's built into recent versions of ffmpeg
audio_cmd.extend(['-strict', '-2'])
else:
@@ -240,17 +240,21 @@ def build_commands(file, new_dir, movie_name):
video_cmd.extend(['-c:v', 'copy'])
else:
video_cmd.extend(['-c:v', nzb2media.VCODEC])
if nzb2media.VFRAMERATE and not (nzb2media.VFRAMERATE * 0.999 <= frame_rate <= nzb2media.VFRAMERATE * 1.001):
if nzb2media.VFRAMERATE and not nzb2media.VFRAMERATE * 0.999 <= frame_rate <= nzb2media.VFRAMERATE * 1.001:
video_cmd.extend(['-r', str(nzb2media.VFRAMERATE)])
if scale:
w_scale = width / float(scale.split(':')[0])
h_scale = height / float(scale.split(':')[1])
if w_scale > h_scale: # widescreen, Scale by width only.
scale = '{width}:{height}'.format(width=scale.split(':')[0], height=int((height / w_scale) / 2) * 2)
_width = scale.split(':')[0]
_height = int((height / w_scale) / 2) * 2
scale = f'{_width}:{_height}'
if w_scale > 1:
video_cmd.extend(['-vf', f'scale={scale}'])
else: # lower or matching ratio, scale by height only.
scale = '{width}:{height}'.format(width=int((width / h_scale) / 2) * 2, height=scale.split(':')[1])
_width = int((width / h_scale) / 2) * 2
_height = scale.split(':')[1]
scale = f'{_width}:{_height}'
if h_scale > 1:
video_cmd.extend(['-vf', f'scale={scale}'])
if nzb2media.VBITRATE:
@@ -266,7 +270,8 @@ def build_commands(file, new_dir, movie_name):
video_cmd[1] = nzb2media.VCODEC
if nzb2media.VCODEC == 'copy': # force copy. therefore ignore all other video transcoding.
video_cmd = ['-c:v', 'copy']
map_cmd.extend(['-map', '0:{index}'.format(index=video['index'])])
_index = video['index']
map_cmd.extend(['-map', f'0:{_index}'])
break # Only one video needed
used_audio = 0
a_mapped = []
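The scaling hunk only splits the old str.format call into named _width/_height variables; the arithmetic is unchanged: scale along whichever axis overflows the target more, and round the derived dimension down to an even number with int(x / 2) * 2, since most encoders reject odd frame sizes. A worked example with illustrative numbers:

    width, height = 1920, 1080
    scale = '1280:720'  # requested maximum resolution

    w_scale = width / float(scale.split(':')[0])   # 1.5
    h_scale = height / float(scale.split(':')[1])  # 1.5
    if w_scale > h_scale:  # wider than the target ratio: scale by width only
        _width = scale.split(':')[0]
        _height = int((height / w_scale) / 2) * 2  # force an even height
        scale = f'{_width}:{_height}'
    else:  # matching or taller ratio: scale by height only
        _width = int((width / h_scale) / 2) * 2    # force an even width
        _height = scale.split(':')[1]
        scale = f'{_width}:{_height}'

    print(scale)  # 1280:720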
@@ -297,27 +302,31 @@ def build_commands(file, new_dir, movie_name):
except Exception:
audio4 = []
if audio2: # right (or only) language and codec...
map_cmd.extend(['-map', '0:{index}'.format(index=audio2[0]['index'])])
_index = audio2[0]['index']
map_cmd.extend(['-map', f'0:{_index}'])
a_mapped.extend([audio2[0]['index']])
bitrate = int(float(audio2[0].get('bit_rate', 0))) / 1000
channels = int(float(audio2[0].get('channels', 0)))
audio_cmd.extend([f'-c:a:{used_audio}', 'copy'])
elif audio1: # right (or only) language, wrong codec.
map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]['index'])])
_index = audio1[0]['index']
map_cmd.extend(['-map', f'0:{_index}'])
a_mapped.extend([audio1[0]['index']])
bitrate = int(float(audio1[0].get('bit_rate', 0))) / 1000
channels = int(float(audio1[0].get('channels', 0)))
audio_cmd.extend([f'-c:a:{used_audio}', nzb2media.ACODEC if nzb2media.ACODEC else 'copy'])
elif audio4:
# wrong language, right codec.
map_cmd.extend(['-map', '0:{index}'.format(index=audio4[0]['index'])])
_index = audio4[0]['index']
map_cmd.extend(['-map', f'0:{_index}'])
a_mapped.extend([audio4[0]['index']])
bitrate = int(float(audio4[0].get('bit_rate', 0))) / 1000
channels = int(float(audio4[0].get('channels', 0)))
audio_cmd.extend([f'-c:a:{used_audio}', 'copy'])
elif audio3:
# wrong language, wrong codec. just pick the default audio track
map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]['index'])])
_index = audio3[0]['index']
map_cmd.extend(['-map', f'0:{_index}'])
a_mapped.extend([audio3[0]['index']])
bitrate = int(float(audio3[0].get('bit_rate', 0))) / 1000
channels = int(float(audio3[0].get('channels', 0)))
@@ -326,7 +335,7 @@ def build_commands(file, new_dir, movie_name):
audio_cmd.extend([f'-ac:a:{used_audio}', str(nzb2media.ACHANNELS)])
if audio_cmd[1] == 'copy':
audio_cmd[1] = nzb2media.ACODEC
if nzb2media.ABITRATE and not (nzb2media.ABITRATE * 0.9 < bitrate < nzb2media.ABITRATE * 1.1):
if nzb2media.ABITRATE and not nzb2media.ABITRATE * 0.9 < bitrate < nzb2media.ABITRATE * 1.1:
audio_cmd.extend([f'-b:a:{used_audio}', str(nzb2media.ABITRATE)])
if audio_cmd[1] == 'copy':
audio_cmd[1] = nzb2media.ACODEC
@@ -334,7 +343,7 @@ def build_commands(file, new_dir, movie_name):
audio_cmd.extend([f'-q:a:{used_audio}', str(nzb2media.OUTPUTQUALITYPERCENT)])
if audio_cmd[1] == 'copy':
audio_cmd[1] = nzb2media.ACODEC
if audio_cmd[1] in ['aac', 'dts']:
if audio_cmd[1] in {'aac', 'dts'}:
audio_cmd[2:2] = ['-strict', '-2']
if nzb2media.ACODEC2_ALLOW:
used_audio += 1
@@ -347,13 +356,15 @@ def build_commands(file, new_dir, movie_name):
except Exception:
audio6 = []
if audio5: # right language and codec.
map_cmd.extend(['-map', '0:{index}'.format(index=audio5[0]['index'])])
_index = audio5[0]['index']
map_cmd.extend(['-map', f'0:{_index}'])
a_mapped.extend([audio5[0]['index']])
bitrate = int(float(audio5[0].get('bit_rate', 0))) / 1000
channels = int(float(audio5[0].get('channels', 0)))
audio_cmd2.extend([f'-c:a:{used_audio}', 'copy'])
elif audio1: # right language wrong codec.
map_cmd.extend(['-map', '0:{index}'.format(index=audio1[0]['index'])])
_index = audio1[0]['index']
map_cmd.extend(['-map', f'0:{_index}'])
a_mapped.extend([audio1[0]['index']])
bitrate = int(float(audio1[0].get('bit_rate', 0))) / 1000
channels = int(float(audio1[0].get('channels', 0)))
@@ -362,14 +373,16 @@ def build_commands(file, new_dir, movie_name):
else:
audio_cmd2.extend([f'-c:a:{used_audio}', 'copy'])
elif audio6: # wrong language, right codec
map_cmd.extend(['-map', '0:{index}'.format(index=audio6[0]['index'])])
_index = audio6[0]['index']
map_cmd.extend(['-map', f'0:{_index}'])
a_mapped.extend([audio6[0]['index']])
bitrate = int(float(audio6[0].get('bit_rate', 0))) / 1000
channels = int(float(audio6[0].get('channels', 0)))
audio_cmd2.extend([f'-c:a:{used_audio}', 'copy'])
elif audio3:
# wrong language, wrong codec just pick the default audio track
map_cmd.extend(['-map', '0:{index}'.format(index=audio3[0]['index'])])
_index = audio3[0]['index']
map_cmd.extend(['-map', f'0:{_index}'])
a_mapped.extend([audio3[0]['index']])
bitrate = int(float(audio3[0].get('bit_rate', 0))) / 1000
channels = int(float(audio3[0].get('channels', 0)))
@@ -381,7 +394,7 @@ def build_commands(file, new_dir, movie_name):
audio_cmd2.extend([f'-ac:a:{used_audio}', str(nzb2media.ACHANNELS2)])
if audio_cmd2[1] == 'copy':
audio_cmd2[1] = nzb2media.ACODEC2
if nzb2media.ABITRATE2 and not (nzb2media.ABITRATE2 * 0.9 < bitrate < nzb2media.ABITRATE2 * 1.1):
if nzb2media.ABITRATE2 and not nzb2media.ABITRATE2 * 0.9 < bitrate < nzb2media.ABITRATE2 * 1.1:
audio_cmd2.extend([f'-b:a:{used_audio}', str(nzb2media.ABITRATE2)])
if audio_cmd2[1] == 'copy':
audio_cmd2[1] = nzb2media.ACODEC2
@@ -389,7 +402,7 @@ def build_commands(file, new_dir, movie_name):
audio_cmd2.extend([f'-q:a:{used_audio}', str(nzb2media.OUTPUTQUALITYPERCENT)])
if audio_cmd2[1] == 'copy':
audio_cmd2[1] = nzb2media.ACODEC2
if audio_cmd2[1] in ['aac', 'dts']:
if audio_cmd2[1] in {'aac', 'dts'}:
audio_cmd2[2:2] = ['-strict', '-2']
if a_mapped[1] == a_mapped[0] and audio_cmd2[1:] == audio_cmd[1:]:
# check for duplicate output track.
@@ -403,7 +416,8 @@ def build_commands(file, new_dir, movie_name):
if audio['index'] in a_mapped:
continue
used_audio += 1
map_cmd.extend(['-map', '0:{index}'.format(index=audio['index'])])
_index = audio['index']
map_cmd.extend(['-map', f'0:{_index}'])
audio_cmd3 = []
bitrate = int(float(audio.get('bit_rate', 0))) / 1000
channels = int(float(audio.get('channels', 0)))
@@ -418,7 +432,7 @@ def build_commands(file, new_dir, movie_name):
audio_cmd3.extend([f'-ac:a:{used_audio}', str(nzb2media.ACHANNELS3)])
if audio_cmd3[1] == 'copy':
audio_cmd3[1] = nzb2media.ACODEC3
if nzb2media.ABITRATE3 and not (nzb2media.ABITRATE3 * 0.9 < bitrate < nzb2media.ABITRATE3 * 1.1):
if nzb2media.ABITRATE3 and not nzb2media.ABITRATE3 * 0.9 < bitrate < nzb2media.ABITRATE3 * 1.1:
audio_cmd3.extend([f'-b:a:{used_audio}', str(nzb2media.ABITRATE3)])
if audio_cmd3[1] == 'copy':
audio_cmd3[1] = nzb2media.ACODEC3
@@ -426,7 +440,7 @@ def build_commands(file, new_dir, movie_name):
audio_cmd3.extend([f'-q:a:{used_audio}', str(nzb2media.OUTPUTQUALITYPERCENT)])
if audio_cmd3[1] == 'copy':
audio_cmd3[1] = nzb2media.ACODEC3
if audio_cmd3[1] in ['aac', 'dts']:
if audio_cmd3[1] in {'aac', 'dts'}:
audio_cmd3[2:2] = ['-strict', '-2']
audio_cmd.extend(audio_cmd3)
s_mapped = []
@@ -445,17 +459,18 @@ def build_commands(file, new_dir, movie_name):
for sub in subs1:
if nzb2media.BURN and not burnt and os.path.isfile(input_file):
subloc = 0
for index in range(len(sub_streams)):
if sub_streams[index]['index'] == sub['index']:
for index, sub_stream in enumerate(sub_streams):
if sub_stream['index'] == sub['index']:
subloc = index
break
video_cmd.extend(['-vf', f'subtitles={input_file}:si={subloc}'])
burnt = 1
if not nzb2media.ALLOWSUBS:
break
if sub['codec_name'] in ['dvd_subtitle', 'VobSub'] and nzb2media.SCODEC == 'mov_text': # We can't convert these.
continue
map_cmd.extend(['-map', '0:{index}'.format(index=sub['index'])])
if sub['codec_name'] in {'dvd_subtitle', 'VobSub'} and nzb2media.SCODEC == 'mov_text':
continue # We can't convert these.
_index = sub['index']
map_cmd.extend(['-map', f'0:{_index}'])
s_mapped.extend([sub['index']])
if nzb2media.SINCLUDE:
for sub in sub_streams:
@@ -463,9 +478,10 @@ def build_commands(file, new_dir, movie_name):
break
if sub['index'] in s_mapped:
continue
if sub['codec_name'] in ['dvd_subtitle', 'VobSub'] and nzb2media.SCODEC == 'mov_text': # We can't convert these.
if sub['codec_name'] in {'dvd_subtitle', 'VobSub'} and nzb2media.SCODEC == 'mov_text': # We can't convert these.
continue
map_cmd.extend(['-map', '0:{index}'.format(index=sub['index'])])
_index = sub['index']
map_cmd.extend(['-map', f'0:{_index}'])
s_mapped.extend([sub['index']])
if nzb2media.OUTPUTFASTSTART:
other_cmd.extend(['-movflags', '+faststart'])
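The burnt-subtitle hunk applies the same enumerate() rewrite to the stream search: the loop yields each subtitle stream dict directly instead of indexing sub_streams, and the result is the stream's position within the subtitle-only list, which is what the code passes as si= to the subtitles filter. Minimal sketch with made-up ffprobe-style dicts:

    sub_streams = [{'index': 2, 'codec_name': 'subrip'}, {'index': 4, 'codec_name': 'ass'}]
    sub = {'index': 4}

    subloc = 0
    for index, sub_stream in enumerate(sub_streams):
        if sub_stream['index'] == sub['index']:
            # Position among subtitle streams, not the absolute stream index.
            subloc = index
            break

    print(f'subtitles=input.mkv:si={subloc}')  # subtitles=input.mkv:si=1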
@@ -580,7 +596,7 @@ def extract_subs(file, newfile_path):
log.error('Extracting subtitles has failed')


def process_list(iterable, new_dir):
def process_list(iterable):
rem_list = []
new_list = []
combine = []
@@ -589,9 +605,9 @@ def process_list(iterable, new_dir):
success = True
for item in iterable:
ext = os.path.splitext(item)[1].lower()
if ext in ['.iso', '.bin', '.img'] and ext not in nzb2media.IGNOREEXTENSIONS:
if ext in {'.iso', '.bin', '.img'} and ext not in nzb2media.IGNOREEXTENSIONS:
log.debug(f'Attempting to rip disk image: {item}')
new_list.extend(rip_iso(item, new_dir))
new_list.extend(rip_iso(item))
rem_list.append(item)
elif re.match('.+VTS_[0-9][0-9]_[0-9].[Vv][Oo][Bb]', item) and '.vob' not in nzb2media.IGNOREEXTENSIONS:
log.debug(f'Found VIDEO_TS image file: {item}')
@@ -638,7 +654,7 @@ def process_list(iterable, new_dir):
return iterable, rem_list, new_list, success


def mount_iso(item, new_dir): # Currently only supports Linux Mount when permissions allow.
def mount_iso(item): # Currently only supports Linux Mount when permissions allow.
if platform.system() == 'Windows':
log.error(f'No mounting options available under Windows for image file {item}')
return []
@@ -670,7 +686,7 @@ def mount_iso(item, new_dir): # Currently only supports Linux Mount when permis
return ['failure'] # If we got here, nothing matched our criteria


def rip_iso(item, new_dir):
def rip_iso(item):
new_files = []
failure_dir = 'failure'
# Mount the ISO in your OS and call combineVTS.
@@ -678,7 +694,7 @@ def rip_iso(item, new_dir):
log.debug(f'No 7zip installed. Attempting to mount image file {item}')
try:
# Currently only works for Linux.
new_files = mount_iso(item, new_dir)
new_files = mount_iso(item)
except Exception:
log.error(f'Failed to mount and extract from image file {item}')
new_files = [failure_dir]
@@ -730,7 +746,7 @@ def rip_iso(item, new_dir):
new_files.append({item: {'name': name, 'files': combined}})
if not new_files:
log.error(f'No VIDEO_TS or BDMV/SOURCE folder found in image file. Attempting to mount and scan {item}')
new_files = mount_iso(item, new_dir)
new_files = mount_iso(item)
except Exception:
log.error(f'Failed to extract from image file {item}')
new_files = [failure_dir]
@@ -832,7 +848,7 @@ def transcode_directory(dir_name):
new_dir = dir_name
movie_name = os.path.splitext(os.path.split(dir_name)[1])[0]
file_list = nzb2media.list_media_files(dir_name, media=True, audio=False, meta=False, archives=False)
file_list, rem_list, new_list, success = process_list(file_list, new_dir)
file_list, rem_list, new_list, success = process_list(file_list)
if not success:
return 1, dir_name
for file in file_list:
@@ -66,24 +66,22 @@ def external_script(output_destination, torrent_name, torrent_label, settings):
if param == 'FN':
command.append(f'{file}')
continue
elif param == 'FP':
if param == 'FP':
command.append(f'{file_path}')
continue
elif param == 'TN':
if param == 'TN':
command.append(f'{torrent_name}')
continue
elif param == 'TL':
if param == 'TL':
command.append(f'{torrent_label}')
continue
elif param == 'DN':
if param == 'DN':
if nzb2media.USER_SCRIPT_RUNONCE == 1:
command.append(f'{output_destination}')
else:
command.append(f'{dirpath}')
continue
else:
command.append(param)
continue
command.append(param)
cmd = ''
for item in command:
cmd = f'{cmd} {item}'
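Because every branch of the substitution loop ends in continue, the elif chain can be flattened into independent if blocks and the trailing else dropped, which is all this hunk does. A compact sketch with placeholder values:

    def build_command(params, file='episode.mkv', torrent_name='show.s01', torrent_label='tv'):
        command = []
        for param in params:
            if param == 'FN':
                command.append(f'{file}')
                continue
            if param == 'TN':
                command.append(f'{torrent_name}')
                continue
            if param == 'TL':
                command.append(f'{torrent_label}')
                continue
            # Anything unrecognised is passed through untouched.
            command.append(param)
        return command

    print(build_command(['--name', 'TN', '--label', 'TL']))  # ['--name', 'show.s01', '--label', 'tv']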
@@ -35,9 +35,9 @@ def process_dir(path, link):
dir_contents = os.listdir(path)
# search for single files and move them into their own folder for post-processing
# Generate list of sync files
sync_files = (item for item in dir_contents if os.path.splitext(item)[1] in ['.!sync', '.bts'])
sync_files = (item for item in dir_contents if os.path.splitext(item)[1] in {'.!sync', '.bts'})
# Generate a list of file paths
filepaths = (os.path.join(path, item) for item in dir_contents if item not in ['Thumbs.db', 'thumbs.db'])
filepaths = (os.path.join(path, item) for item in dir_contents if item not in {'Thumbs.db', 'thumbs.db'})
# Generate a list of media files
mediafiles = (item for item in filepaths if os.path.isfile(item))
if not any(sync_files):
@@ -53,7 +53,7 @@ def process_dir(path, link):
directories = (path for path in path_contents if os.path.isdir(path))
for directory in directories:
dir_contents = os.listdir(directory)
sync_files = (item for item in dir_contents if os.path.splitext(item)[1] in ['.!sync', '.bts'])
sync_files = (item for item in dir_contents if os.path.splitext(item)[1] in {'.!sync', '.bts'})
if not any(dir_contents) or any(sync_files):
continue
folders.append(directory)
@@ -33,7 +33,7 @@ def char_replace(name_in):
encoding = 'cp850'
break
# Detect ISO-8859-15
elif (character >= 0xA6) & (character <= 0xFF):
if (character >= 0xA6) & (character <= 0xFF):
encoding = 'iso-8859-15'
break
else:
@@ -42,11 +42,11 @@ def char_replace(name_in):
encoding = 'utf-8'
break
# Detect CP850
elif (character >= 0x80) & (character <= 0xA5):
if (character >= 0x80) & (character <= 0xA5):
encoding = 'cp850'
break
# Detect ISO-8859-15
elif (character >= 0xA6) & (character <= 0xFF):
if (character >= 0xA6) & (character <= 0xFF):
encoding = 'iso-8859-15'
break
if encoding:
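char_replace keeps its byte-range heuristic (a byte in 0x80-0xA5 suggests CP850, one in 0xA6-0xFF suggests ISO-8859-15); the elif branches simply become plain if blocks because each one breaks out of the loop. A standalone sketch of the heuristic - real detection is more involved than this:

    def guess_encoding(raw: bytes):
        encoding = None
        for character in raw:
            # Detect CP850
            if (character >= 0x80) & (character <= 0xA5):
                encoding = 'cp850'
                break
            # Detect ISO-8859-15
            if (character >= 0xA6) & (character <= 0xFF):
                encoding = 'iso-8859-15'
                break
        return encoding

    print(guess_encoding('Señor'.encode('iso-8859-15')))  # iso-8859-15
    print(guess_encoding(b'plain ascii'))                 # None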
@@ -67,7 +67,7 @@ def move_file(filename, path, link):
copy_link(filename, newfile, link)


def is_min_size(input_name, min_size):
def is_min_size(input_name, min_size) -> bool:
file_name, file_ext = os.path.splitext(os.path.basename(input_name))
# audio files we need to check directory size not file size
input_size = os.path.getsize(input_name)
@@ -80,6 +80,7 @@ def is_min_size(input_name, min_size):
# Ignore files under a certain size
if input_size > min_size * 1048576:
return True
return False


def is_archive_file(filename):
@@ -13,8 +13,8 @@ log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())


def find_imdbid(dir_name, input_name, omdb_api_key):
imdbid = None
def find_imdbid(dir_name, input_name, omdb_api_key) -> str:
imdbid = ''
log.info(f'Attemping imdbID lookup for {input_name}')
# find imdbid in dirName
log.info('Searching folder and file names for imdbID ...')
@@ -56,13 +56,13 @@ def find_imdbid(dir_name, input_name, omdb_api_key):
url = 'http://www.omdbapi.com'
if not omdb_api_key:
log.info('Unable to determine imdbID: No api key provided for omdbapi.com.')
return
return ''
log.debug(f'Opening URL: {url}')
try:
response = requests.get(url, params={'apikey': omdb_api_key, 'y': year, 't': title}, verify=False, timeout=(60, 300))
except requests.ConnectionError:
log.error(f'Unable to open URL {url}')
return
return ''
try:
results = response.json()
except Exception:
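find_imdbid now declares -> str and returns an empty string on every failure path instead of a bare return (which yields None), so callers can treat the result uniformly. A tiny sketch of the convention (the regex here is only illustrative):

    import re

    def find_imdbid(name) -> str:
        match = re.search(r'tt\d{7,8}', name)
        if not match:
            return ''  # was a bare return (None) before the lint fix
        return match.group(0)

    print(repr(find_imdbid('Movie.2019.tt1234567.1080p')))  # 'tt1234567'
    print(repr(find_imdbid('Movie.2019.1080p')))            # ''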
@@ -10,14 +10,15 @@ from nzb2media.utils.paths import make_dir

log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())

try:
from jaraco.windows.filesystem import islink, readlink
except ImportError:
if os.name == 'nt':
raise
else:
if os.name != 'nt':
from os.path import islink
from os import readlink
else:
raise


def copy_link(src, target_link, use_link):
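The link module still prefers jaraco.windows for islink/readlink and falls back to the standard library, but the fallback is now keyed on os.name != 'nt', so the original ImportError is re-raised only on Windows where the dependency is actually required. A sketch of the pattern:

    import os

    try:
        # Optional dependency that provides symlink support on Windows.
        from jaraco.windows.filesystem import islink, readlink
    except ImportError:
        if os.name != 'nt':
            # POSIX: the standard library versions are sufficient.
            from os.path import islink
            from os import readlink
        else:
            # On Windows the dependency is required, so keep the error visible.
            raise

    print(islink('.'), callable(readlink))  # False True (on a typical POSIX setup)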
@@ -39,7 +39,8 @@ def clean_file_name(filename):
return filename.strip()


def is_sample(input_name):
def is_sample(input_name) -> bool:
# Ignore 'sample' in files
if re.search('(^|[\\W_])sample\\d*[\\W_]', input_name.lower()):
return True
return False
@@ -67,7 +67,7 @@ def parse_deluge(args):
return input_directory, input_name, input_category, input_hash, input_id


def parse_transmission(args):
def parse_transmission():
# Transmission usage: call TorrenToMedia.py (%TR_TORRENT_DIR% %TR_TORRENT_NAME% is passed on as environmental variables)
input_directory = os.path.normpath(os.getenv('TR_TORRENT_DIR'))
input_name = os.getenv('TR_TORRENT_NAME')
@@ -77,7 +77,7 @@ def parse_transmission(args):
return input_directory, input_name, input_category, input_hash, input_id


def parse_synods(args):
def parse_synods():
# Synology/Transmission usage: call TorrenToMedia.py (%TR_TORRENT_DIR% %TR_TORRENT_NAME% is passed on as environmental variables)
input_directory = ''
input_id = ''
@@ -26,7 +26,7 @@ def onerror(func, path, exc_info):
os.chmod(path, stat.S_IWUSR)
func(path)
else:
raise Exception
raise Exception(exc_info)


def remove_dir(dir_name):
@@ -10,9 +10,16 @@ import typing
import nzb2media

if os.name == 'nt':
# pylint: disable-next=no-name-in-module
from win32event import CreateMutex
from win32api import CloseHandle, GetLastError

# pylint: disable-next=no-name-in-module
from win32api import CloseHandle

# pylint: disable-next=no-name-in-module
from win32api import GetLastError
from winerror import ERROR_ALREADY_EXISTS

log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())

@@ -100,7 +107,7 @@ def restart():
install_type = nzb2media.version_check.CheckVersion().install_type
status = 0
popen_list = []
if install_type in ('git', 'source'):
if install_type in {'git', 'source'}:
popen_list = [sys.executable, nzb2media.APP_FILENAME]
if popen_list:
popen_list += nzb2media.SYS_ARGV
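Splitting the win32api import lets each line carry its own pylint disable-next=no-name-in-module marker, which silences the checker for exactly one statement. Below is a guarded sketch of how these names are commonly used for a single-instance mutex; it assumes pywin32 is installed and only exercises the Windows branch on that platform:

    import os

    if os.name == 'nt':
        # pylint: disable-next=no-name-in-module
        from win32event import CreateMutex

        # pylint: disable-next=no-name-in-module
        from win32api import GetLastError

        from winerror import ERROR_ALREADY_EXISTS

        CreateMutex(None, False, 'nzbtomedia-example-mutex')
        if GetLastError() == ERROR_ALREADY_EXISTS:
            print('another instance is already running')
        else:
            print('acquired the mutex')
    else:
        print('not on Windows; mutex example skipped')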
@@ -15,14 +15,15 @@ log.addHandler(logging.NullHandler())
torrent_clients = {'deluge': deluge, 'qbittorrent': qbittorrent, 'transmission': transmission, 'utorrent': utorrent, 'synods': synology}


def create_torrent_class(client_agent):
def create_torrent_class(client_agent) -> object | None:
if nzb2media.APP_NAME != 'TorrentToMedia.py':
return # Skip loading Torrent for NZBs.
return None # Skip loading Torrent for NZBs.
try:
agent = torrent_clients[client_agent]
except KeyError:
return
return None
else:
deluge.configure_client()
return agent.configure_client()
@@ -38,7 +38,8 @@ class CheckVersion:
def run(self):
self.check_for_new_version()

def find_install_type(self):
@staticmethod
def find_install_type():
"""
Determine how this copy of SB was installed.
returns: type of installation. Possible values are:
@@ -77,13 +78,16 @@ class CheckVersion:


class UpdateManager:
def get_github_repo_user(self):
@staticmethod
def get_github_repo_user():
return nzb2media.GIT_USER

def get_github_repo(self):
@staticmethod
def get_github_repo():
return nzb2media.GIT_REPO

def get_github_branch(self):
@staticmethod
def get_github_branch():
return nzb2media.GIT_BRANCH

@@ -98,7 +102,8 @@ class GitUpdateManager(UpdateManager):
self._num_commits_behind = 0
self._num_commits_ahead = 0

def _git_error(self):
@staticmethod
def _git_error():
log.debug('Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.')

def _find_working_git(self):
@@ -134,9 +139,10 @@ class GitUpdateManager(UpdateManager):
log.debug('Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.')
return None

def _run_git(self, git_path, args):
result = None
proc_err = None
@staticmethod
def _run_git(git_path, args):
result = ''
proc_err = ''
if not git_path:
log.debug('No git specified, can\'t use git commands')
proc_status = 1
@@ -157,7 +163,7 @@ class GitUpdateManager(UpdateManager):
if proc_status == 0:
log.debug(f'{cmd} : returned successful')
proc_status = 0
elif nzb2media.LOG_GIT and proc_status in (1, 128):
elif nzb2media.LOG_GIT and proc_status in {1, 128}:
log.debug(f'{cmd} returned : {result}')
else:
if nzb2media.LOG_GIT:
@@ -234,7 +240,8 @@ class GitUpdateManager(UpdateManager):
if self._num_commits_ahead:
log.error(f'Local branch is ahead of {self.branch}. Automatic update not possible.')
elif self._num_commits_behind:
log.info('There is a newer version available (you\'re {x} commit{s} behind)'.format(x=self._num_commits_behind, s='s' if self._num_commits_behind > 1 else ''))
_plural = 's' if self._num_commits_behind > 1 else ''
log.info(f'There is a newer version available (you\'re {self._num_commits_behind} commit{_plural} behind)')
else:
return

@@ -280,7 +287,7 @@ class SourceUpdateManager(UpdateManager):
self._cur_commit_hash = None
return
try:
with open(version_file) as fin:
with open(version_file, encoding='utf-8') as fin:
self._cur_commit_hash = fin.read().strip(' \n\r')
except OSError as error:
log.debug(f'Unable to open \'version.txt\': {error}')
@@ -338,7 +345,8 @@ class SourceUpdateManager(UpdateManager):
if not self._cur_commit_hash:
log.error('Unknown current version number, don\'t know if we should update or not')
elif self._num_commits_behind > 0:
log.info('There is a newer version available (you\'re {x} commit{s} behind)'.format(x=self._num_commits_behind, s='s' if self._num_commits_behind > 1 else ''))
_plural = 's' if self._num_commits_behind > 1 else ''
log.info(f'There is a newer version available (you\'re {self._num_commits_behind} commit{_plural} behind)')
else:
return

@@ -387,7 +395,7 @@ class SourceUpdateManager(UpdateManager):
# Avoid DLL access problem on WIN32/64
# These files needing to be updated manually
# or find a way to kill the access from memory
if curfile in ('unrar.dll', 'unrar64.dll'):
if curfile in {'unrar.dll', 'unrar64.dll'}:
try:
os.chmod(new_path, stat.S_IWRITE)
os.remove(new_path)
@@ -402,7 +410,7 @@ class SourceUpdateManager(UpdateManager):
os.renames(old_path, new_path)
# update version.txt with commit hash
try:
with open(version_path, 'w') as ver_file:
with open(version_path, 'w', encoding='utf-8') as ver_file:
ver_file.write(self._newest_commit_hash)
except OSError as error:
log.error(f'Unable to write version file, update not complete: {error}')
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'CouchPotato'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'CouchPotato'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'Gamez'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'Gamez'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'HeadPhones'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'HeadPhones'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'LazyLibrarian'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'LazyLibrarian'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'Lidarr'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'Lidarr'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -65,4 +65,4 @@ def main(args, section=None):


if __name__ == '__main__':
exit(main(sys.argv))
sys.exit(main(sys.argv))
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'Mylar'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'Mylar'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'NzbDrone'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'NzbDrone'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'Radarr'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'Radarr'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'SiCKRAGE'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'SiCKRAGE'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'SickBeard'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'SickBeard'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)
@@ -2,6 +2,6 @@ import sys

import nzbToMedia

section = 'Watcher3'
result = nzbToMedia.main(sys.argv, section)
SECTION = 'Watcher3'
result = nzbToMedia.main(sys.argv, SECTION)
sys.exit(result)