Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-08-21 22:03:18 -07:00)

Commit cefe500217: Merge branch 'nightly' into dependabot/pip/nightly/importlib-metadata-8.5.0

79 changed files with 1174 additions and 766 deletions
CHANGELOG.md (24 changes)

@@ -1,5 +1,27 @@
 # Changelog
 
+## v2.14.6 (2024-10-12)
+
+* Newsletters:
+  * Fix: Allow formatting newsletter date parameters.
+  * Change: Support apscheduler compatible cron expressions.
+* UI:
+  * Fix: Round runtime before converting to human duration.
+  * Fix: Make recently added/watched rows touch scrollable.
+* Other:
+  * Fix: Auto-updater not running.
+
+
+## v2.14.5 (2024-09-20)
+
+* Activity:
+  * Fix: Display of 2k resolution on activity card.
+* Notifications:
+  * Fix: ntfy notifications with special characters failing to send.
+* Other:
+  * Fix: Memory leak with database closing. (#2404)
+
+
 ## v2.14.4 (2024-08-10)
 
 * Notifications:
@@ -8,7 +30,7 @@
 * UI:
   * Fix: macOS platform capitalization.
 * Other:
-  * Fix: Remove deprecated getdefaultlocale (Thanks @teodorstelian) (#2364, #2345)
+  * Fix: Remove deprecated getdefaultlocale. (Thanks @teodorstelian) (#2364, #2345)
 
 
 ## v2.14.3 (2024-06-19)
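A note on the "apscheduler compatible cron expressions" entry above: APScheduler's cron trigger accepts everything standard crontab does plus extended field expressions. A minimal sketch of the difference (the trigger objects below are illustrative, not Tautulli code):

    from apscheduler.triggers.cron import CronTrigger

    # Standard crontab string: 06:30 every Monday through Friday.
    standard = CronTrigger.from_crontab('30 6 * * mon-fri')

    # Extended APScheduler syntax: the last Friday of every month at 18:00.
    extended = CronTrigger(day='last fri', hour=18)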

@@ -1478,7 +1478,8 @@ a:hover .dashboard-stats-square {
     text-align: center;
     position: relative;
     z-index: 0;
-    overflow: hidden;
+    overflow: auto;
+    scrollbar-width: none;
 }
 .dashboard-recent-media {
     width: 100%;

@@ -92,10 +92,10 @@
 <h3 class="pull-left"><span id="recently-added-xml">Recently Added</span></h3>
 <ul class="nav nav-header nav-dashboard pull-right" style="margin-top: -3px;">
     <li>
-        <a href="#" id="recently-added-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+        <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
     </li>
     <li>
-        <a href="#" id="recently-added-page-right" class="paginate btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+        <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
     </li>
 </ul>
 <div class="button-bar">
@@ -936,10 +936,14 @@
         count: recently_added_count,
         media_type: recently_added_type
     },
+    beforeSend: function () {
+        $(".dashboard-recent-media-row").animate({ scrollLeft: 0 }, 1000);
+    },
     complete: function (xhr, status) {
         $("#recentlyAdded").html(xhr.responseText);
         $('#ajaxMsg').fadeOut();
-        highlightAddedScrollerButton();
+        highlightScrollerButton("#recently-added");
+        paginateScroller("#recently-added", ".paginate-added");
     }
 });
 }
@@ -955,57 +959,11 @@
     recentlyAdded(recently_added_count, recently_added_type);
 }
 
-function highlightAddedScrollerButton() {
-    var scroller = $("#recently-added-row-scroller");
-    var numElems = scroller.find("li:visible").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("body").find(".container-fluid").width()) {
-        $("#recently-added-page-right").removeClass("disabled");
-    } else {
-        $("#recently-added-page-right").addClass("disabled");
-    }
-}
-
-$(window).resize(function () {
-    highlightAddedScrollerButton();
-});
-
-function resetScroller() {
-    leftTotal = 0;
-    $("#recently-added-row-scroller").animate({ left: leftTotal }, 1000);
-    $("#recently-added-page-left").addClass("disabled").blur();
-}
-
-var leftTotal = 0;
-$(".paginate").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-added-row-scroller");
-    var containerWidth = $("body").find(".container-fluid").width();
-    var scrollAmount = $(this).data("id") * parseInt((containerWidth - 15) / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotal }, 250);
-
-    if (leftTotal === 0) {
-        $("#recently-added-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-left").removeClass("disabled");
-    }
-
-    if (leftTotal === leftMax) {
-        $("#recently-added-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-right").removeClass("disabled");
-    }
-});
-
 $('#recently-added-toggles').on('change', function () {
     $('#recently-added-toggles > label').removeClass('active');
     selected_filter = $('input[name=recently-added-toggle]:checked', '#recently-added-toggles');
     $(selected_filter).closest('label').addClass('active');
     recently_added_type = $(selected_filter).val();
-    resetScroller();
     setLocalStorage('home_stats_recently_added_type', recently_added_type);
     recentlyAdded(recently_added_count, recently_added_type);
 });
@@ -1013,7 +971,6 @@
 $('#recently-added-count').change(function () {
     forceMinMax($(this));
     recently_added_count = $(this).val();
-    resetScroller();
     setLocalStorage('home_stats_recently_added_count', recently_added_count);
     recentlyAdded(recently_added_count, recently_added_type);
 });

@@ -360,7 +360,8 @@ function humanDuration(ms, sig='dhm', units='ms', return_seconds=300000) {
         sig = 'dhms'
     }
 
-    ms = ms * factors[units];
+    r = factors[sig.slice(-1)];
+    ms = Math.round(ms * factors[units] / r) * r;
 
     h = ms % factors['d'];
     d = Math.trunc(ms / factors['d']);
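On the humanDuration change above: the old code truncated when splitting into units, so a runtime of 89.9 minutes would render as "1 hr 29 mins". The new code first rounds to the least significant unit that will be displayed (the last character of sig). A sketch of the same arithmetic in Python, assuming the same factor table as the JS (values in milliseconds):

    factors = {'d': 86400000, 'h': 3600000, 'm': 60000, 's': 1000, 'ms': 1}

    def round_for_display(ms, sig='dhm'):
        # Round to the smallest displayed unit, e.g. 'm' when sig == 'dhm'.
        r = factors[sig[-1]]
        return round(ms / r) * r

    print(round_for_display(89.9 * 60000))  # 5400000 ms, i.e. exactly "1 hr 30 mins"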
@@ -929,3 +930,50 @@ $('.modal').on('hide.bs.modal', function (e) {
 $.fn.hasScrollBar = function() {
     return this.get(0).scrollHeight > this.get(0).clientHeight;
 }
+
+function paginateScroller(scrollerId, buttonClass) {
+    $(buttonClass).click(function (e) {
+        e.preventDefault();
+        var scroller = $(scrollerId + "-row-scroller");
+        var scrollerParent = scroller.parent();
+        var containerWidth = scrollerParent.width();
+        var scrollCurrent = scrollerParent.scrollLeft();
+        var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
+        var scrollMax = scroller.width() - Math.abs(scrollAmount);
+        var scrollTotal = Math.min(parseInt(scrollCurrent / 175) * 175 + scrollAmount, scrollMax);
+        scrollerParent.animate({ scrollLeft: scrollTotal }, 250);
+    });
+}
+
+function highlightScrollerButton(scrollerId) {
+    var scroller = $(scrollerId + "-row-scroller");
+    var scrollerParent = scroller.parent();
+    var buttonLeft = $(scrollerId + "-page-left");
+    var buttonRight = $(scrollerId + "-page-right");
+
+    var numElems = scroller.find("li").length;
+    scroller.width(numElems * 175);
+    $(buttonLeft).addClass("disabled").blur();
+    if (scroller.width() > scrollerParent.width()) {
+        $(buttonRight).removeClass("disabled");
+    } else {
+        $(buttonRight).addClass("disabled");
+    }
+
+    scrollerParent.scroll(function () {
+        var scrollCurrent = $(this).scrollLeft();
+        var scrollMax = scroller.width() - $(this).width();
+
+        if (scrollCurrent == 0) {
+            $(buttonLeft).addClass("disabled").blur();
+        } else {
+            $(buttonLeft).removeClass("disabled");
+        }
+
+        if (scrollCurrent >= scrollMax) {
+            $(buttonRight).addClass("disabled").blur();
+        } else {
+            $(buttonRight).removeClass("disabled");
+        }
+    });
+}
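The shared helpers above replace the per-page left/right offset bookkeeping with native scrollLeft, snapped to the 175px tile width used throughout these rows. A sketch of the snapping arithmetic in Python (the function name is illustrative; 175 is the tile width from the code above):

    def next_scroll_position(current, container_width, row_width, direction):
        # Page by as many whole 175px tiles as fit in the container,
        # starting from the current position snapped to a tile boundary.
        # (The browser clamps negative scrollLeft to 0.)
        amount = direction * (container_width // 175) * 175
        limit = row_width - abs(amount)
        return min((current // 175) * 175 + amount, limit)

    # One page right in an 800px container over a 1750px row, from the start:
    print(next_scroll_position(0, 800, 1750, +1))  # 700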

@@ -149,10 +149,10 @@ DOCUMENTATION :: END
 <div class="table-card-header">
     <ul class="nav nav-header nav-dashboard pull-right">
         <li>
-            <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+            <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
         </li>
         <li>
-            <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+            <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
         </li>
     </ul>
     <div class="header-bar">
@@ -175,10 +175,10 @@ DOCUMENTATION :: END
 <div class="table-card-header">
     <ul class="nav nav-header nav-dashboard pull-right">
         <li>
-            <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+            <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
         </li>
         <li>
-            <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+            <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
         </li>
     </ul>
     <div class="header-bar">
@@ -690,7 +690,8 @@ DOCUMENTATION :: END
     },
     complete: function(xhr, status) {
         $("#library-recently-watched").html(xhr.responseText);
-        highlightWatchedScrollerButton();
+        highlightScrollerButton("#recently-watched");
+        paginateScroller("#recently-watched", ".paginate-watched");
     }
 });
 }
@@ -706,7 +707,8 @@ DOCUMENTATION :: END
     },
     complete: function(xhr, status) {
         $("#library-recently-added").html(xhr.responseText);
-        highlightAddedScrollerButton();
+        highlightScrollerButton("#recently-added");
+        paginateScroller("#recently-added", ".paginate-added");
     }
 });
 }
@@ -716,83 +718,8 @@ DOCUMENTATION :: END
     recentlyAdded();
 % endif
 
-function highlightWatchedScrollerButton() {
-    var scroller = $("#recently-watched-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#library-recently-watched").width()) {
-        $("#recently-watched-page-right").removeClass("disabled");
-    } else {
-        $("#recently-watched-page-right").addClass("disabled");
-    }
-}
-
-function highlightAddedScrollerButton() {
-    var scroller = $("#recently-added-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#library-recently-added").width()) {
-        $("#recently-added-page-right").removeClass("disabled");
-    } else {
-        $("#recently-added-page-right").addClass("disabled");
-    }
-}
-
-$(window).resize(function() {
-    highlightWatchedScrollerButton();
-    highlightAddedScrollerButton();
-});
-
 $('div.art-face').animate({ opacity: 0.2 }, { duration: 1000 });
 
-var leftTotalWatched = 0;
-$(".paginate-watched").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-watched-row-scroller");
-    var containerWidth = $("#library-recently-watched").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotalWatched = Math.max(Math.min(leftTotalWatched + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotalWatched }, 250);
-
-    if (leftTotalWatched == 0) {
-        $("#recently-watched-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-left").removeClass("disabled");
-    }
-
-    if (leftTotalWatched == leftMax) {
-        $("#recently-watched-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-right").removeClass("disabled");
-    }
-});
-
-var leftTotalAdded = 0;
-$(".paginate-added").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-added-row-scroller");
-    var containerWidth = $("#library-recently-added").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotalAdded = Math.max(Math.min(leftTotalAdded + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotalAdded }, 250);
-
-    if (leftTotalAdded == 0) {
-        $("#recently-added-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-left").removeClass("disabled");
-    }
-
-    if (leftTotalAdded == leftMax) {
-        $("#recently-added-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-added-page-right").removeClass("disabled");
-    }
-});
-
 $(document).ready(function () {
 
     // Javascript to enable link to tab

@@ -36,7 +36,7 @@ DOCUMENTATION :: END
 
 %>
 <div class="dashboard-recent-media-row">
-    <div id="recently-added-row-scroller" style="left: 0;">
+    <div id="recently-added-row-scroller">
         <ul class="dashboard-recent-media list-unstyled">
             % for item in data:
             <li>

@@ -50,7 +50,10 @@
 </div>
 <p class="help-block">
     <span id="simple_cron_message">Set the schedule for the newsletter.</span>
-    <span id="custom_cron_message">Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>. Only standard cron values are valid.</span>
+    <span id="custom_cron_message">
+        Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>.
+        <a href="${anon_url('https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#expression-types')}" target="_blank" rel="noreferrer">Click here</a> for a list of supported expressions.
+    </span>
 </p>
 </div>
 <div class="form-group">
@@ -481,7 +484,7 @@
 });
 
 if (${newsletter['config']['custom_cron']}) {
-    $('#cron_value').val('${newsletter['cron']}');
+    $('#cron_value').val('${newsletter['cron'] | n}');
 } else {
     try {
         cron_widget.cron('value', '${newsletter['cron']}');
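On the ${newsletter['cron'] | n} change above: in Mako, the | n filter suppresses the default filters for that one expression, so the stored cron string is emitted verbatim instead of being escaped inside the JavaScript string. A minimal sketch, assuming the template environment applies HTML escaping ('h') as a default filter (the example value is illustrative):

    from mako.template import Template

    t = Template("$('#cron_value').val('${value}');", default_filters=['h'])
    print(t.render(value="second='*/30'"))
    # quotes become &#39; and break the JS string literal

    t = Template("$('#cron_value').val('${value | n}');", default_filters=['h'])
    print(t.render(value="second='*/30'"))
    # emitted verbatim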

@@ -36,7 +36,7 @@ DOCUMENTATION :: END
 %>
 % if data:
 <div class="dashboard-recent-media-row">
-    <div id="recently-added-row-scroller" style="left: 0;">
+    <div id="recently-added-row-scroller">
         <ul class="dashboard-recent-media list-unstyled">
             % for item in data:
             <div class="dashboard-recent-media-instance">

@@ -125,10 +125,10 @@ DOCUMENTATION :: END
 <div class="table-card-header">
     <ul class="nav nav-header nav-dashboard pull-right">
         <li>
-            <a href="#" id="recently-watched-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a>
+            <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
         </li>
         <li>
-            <a href="#" id="recently-watched-page-right" class="paginate btn-gray" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a>
+            <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
         </li>
     </ul>
     <div class="header-bar">
@@ -666,52 +666,14 @@ DOCUMENTATION :: END
     },
     complete: function(xhr, status) {
         $("#user-recently-watched").html(xhr.responseText);
-        highlightWatchedScrollerButton();
+        highlightScrollerButton("#recently-watched");
+        paginateScroller("#recently-watched", ".paginate-watched");
     }
 });
 }
 
 recentlyWatched();
 
-function highlightWatchedScrollerButton() {
-    var scroller = $("#recently-watched-row-scroller");
-    var numElems = scroller.find("li").length;
-    scroller.width(numElems * 175);
-    if (scroller.width() > $("#user-recently-watched").width()) {
-        $("#recently-watched-page-right").removeClass("disabled");
-    } else {
-        $("#recently-watched-page-right").addClass("disabled");
-    }
-}
-
-$(window).resize(function() {
-    highlightWatchedScrollerButton();
-});
-
-var leftTotal = 0;
-$(".paginate").click(function (e) {
-    e.preventDefault();
-    var scroller = $("#recently-watched-row-scroller");
-    var containerWidth = $("#user-recently-watched").width();
-    var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
-    var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
-
-    leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
-    scroller.animate({ left: leftTotal }, 250);
-
-    if (leftTotal == 0) {
-        $("#recently-watched-page-left").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-left").removeClass("disabled");
-    }
-
-    if (leftTotal == leftMax) {
-        $("#recently-watched-page-right").addClass("disabled").blur();
-    } else {
-        $("#recently-watched-page-right").removeClass("disabled");
-    }
-});
-
 $(document).ready(function () {
     // Javascript to enable link to tab
     var hash = document.location.hash;

@@ -31,7 +31,7 @@ DOCUMENTATION :: END
 from plexpy.helpers import page, short_season
 %>
 <div class="dashboard-recent-media-row">
-    <div id="recently-watched-row-scroller" style="left: 0;">
+    <div id="recently-watched-row-scroller">
         <ul class="dashboard-recent-media list-unstyled">
             % for item in data:
             <li>

@@ -1,4 +1,4 @@
 from .core import contents, where
 
 __all__ = ["contents", "where"]
-__version__ = "2024.07.04"
+__version__ = "2024.08.30"

@@ -4796,3 +4796,134 @@ PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw
 hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG
 XSaQpYXFuXqUPoeovQA=
 -----END CERTIFICATE-----
+
+# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA CYBER Root CA"
+# Serial: 85076849864375384482682434040119489222
+# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51
+# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66
+# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ
+MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290
+IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5
+WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO
+LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg
+Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P
+40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF
+avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/
+34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i
+JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu
+j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf
+Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP
+2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA
+S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA
+oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC
+kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW
+5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd
+BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB
+AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t
+tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn
+68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn
+TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t
+RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx
+f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI
+Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz
+8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4
+NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX
+xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6
+t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA12"
+# Serial: 587887345431707215246142177076162061960426065942
+# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8
+# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4
+# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw
+NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF
+KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt
+p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd
+J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur
+FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J
+hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K
+h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF
+AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld
+mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ
+mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA
+8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV
+55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/
+yOPiZwud9AzqVN/Ssq+xIvEg37xEHA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA14"
+# Serial: 575790784512929437950770173562378038616896959179
+# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5
+# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f
+# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw
+NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/
+FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg
+vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy
+6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo
+/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J
+kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ
+0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib
+y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac
+18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs
+0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB
+SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL
+ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk
+86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E
+rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib
+ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS
+DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4
+2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo
+FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy
+K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6
+dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl
+Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB
+365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c
+JRNItX+S
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA15"
+# Serial: 126083514594751269499665114766174399806381178503
+# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47
+# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d
+# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a
+-----BEGIN CERTIFICATE-----
+MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw
+UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM
+dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy
+NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl
+cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290
+IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4
+wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR
+ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT
+9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp
+4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6
+bkU6iYAZezKYVWOr62Nuk22rGwlgMU4=
+-----END CERTIFICATE-----

@@ -1,4 +1,11 @@
-"""Read resources contained within a package."""
+"""
+Read resources contained within a package.
+
+This codebase is shared between importlib.resources in the stdlib
+and importlib_resources in PyPI. See
+https://github.com/python/importlib_metadata/wiki/Development-Methodology
+for more detail.
+"""
 
 from ._common import (
     as_file,
@@ -7,7 +14,7 @@ from ._common import (
     Anchor,
 )
 
-from .functional import (
+from ._functional import (
     contents,
     is_resource,
     open_binary,

@@ -66,10 +66,10 @@ def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
     # zipimport.zipimporter does not support weak references, resulting in a
     # TypeError. That seems terrible.
     spec = package.__spec__
-    reader = getattr(spec.loader, 'get_resource_reader', None)  # type: ignore
+    reader = getattr(spec.loader, 'get_resource_reader', None)  # type: ignore[union-attr]
     if reader is None:
         return None
-    return reader(spec.name)  # type: ignore
+    return reader(spec.name)  # type: ignore[union-attr]
 
 
 @functools.singledispatch
@@ -93,12 +93,13 @@ def _infer_caller():
     """
 
     def is_this_file(frame_info):
-        return frame_info.filename == __file__
+        return frame_info.filename == stack[0].filename
 
     def is_wrapper(frame_info):
         return frame_info.function == 'wrapper'
 
-    not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
+    stack = inspect.stack()
+    not_this_file = itertools.filterfalse(is_this_file, stack)
     # also exclude 'wrapper' due to singledispatch in the call stack
     callers = itertools.filterfalse(is_wrapper, not_this_file)
     return next(callers).frame
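Why the hunk above compares against stack[0].filename instead of __file__: when the module is shipped compiled-only (the .pyc case exercised by python/cpython#123085 below), the filename recorded in its code objects can differ from the module's __file__, so frames from this file would no longer be filtered out. stack[0] is always _infer_caller's own frame, so its filename matches the other frames from the same file by construction. A sketch of the idea (illustrative, not library code):

    import inspect

    def calling_frame():
        stack = inspect.stack()
        here = stack[0].filename  # robust even if __file__ != code-object filename
        # First frame from a different file; raises StopIteration if called
        # only from this file.
        return next(f for f in stack if f.filename != here)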
@@ -182,7 +183,7 @@ def _(path):
 @contextlib.contextmanager
 def _temp_path(dir: tempfile.TemporaryDirectory):
     """
-    Wrap tempfile.TemporyDirectory to return a pathlib object.
+    Wrap tempfile.TemporaryDirectory to return a pathlib object.
     """
     with dir as result:
         yield pathlib.Path(result)

@@ -5,6 +5,6 @@ __all__ = ['ZipPath']
 
 
 if sys.version_info >= (3, 10):
-    from zipfile import Path as ZipPath  # type: ignore
+    from zipfile import Path as ZipPath
 else:
-    from zipp import Path as ZipPath  # type: ignore
+    from zipp import Path as ZipPath

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import collections
 import contextlib
 import itertools
@@ -5,6 +7,7 @@ import pathlib
 import operator
 import re
 import warnings
+from collections.abc import Iterator
 
 from . import abc
 
@@ -34,8 +37,10 @@ class FileReader(abc.TraversableResources):
 
 class ZipReader(abc.TraversableResources):
     def __init__(self, loader, module):
+        self.prefix = loader.prefix.replace('\\', '/')
         if loader.is_package(module):
             _, _, name = module.rpartition('.')
-            self.prefix = loader.prefix.replace('\\', '/') + name + '/'
+            self.prefix += name + '/'
         self.archive = loader.archive
 
     def open_resource(self, resource):
@@ -133,27 +138,31 @@ class NamespaceReader(abc.TraversableResources):
     def __init__(self, namespace_path):
         if 'NamespacePath' not in str(namespace_path):
             raise ValueError('Invalid path')
-        self.path = MultiplexedPath(*map(self._resolve, namespace_path))
+        self.path = MultiplexedPath(*filter(bool, map(self._resolve, namespace_path)))
 
     @classmethod
-    def _resolve(cls, path_str) -> abc.Traversable:
+    def _resolve(cls, path_str) -> abc.Traversable | None:
         r"""
         Given an item from a namespace path, resolve it to a Traversable.
 
         path_str might be a directory on the filesystem or a path to a
         zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or
         ``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``.
+
+        path_str might also be a sentinel used by editable packages to
+        trigger other behaviors (see python/importlib_resources#311).
+        In that case, return None.
         """
-        (dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir())
-        return dir
+        dirs = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir())
+        return next(dirs, None)
 
     @classmethod
-    def _candidate_paths(cls, path_str):
+    def _candidate_paths(cls, path_str: str) -> Iterator[abc.Traversable]:
         yield pathlib.Path(path_str)
         yield from cls._resolve_zip_path(path_str)
 
     @staticmethod
-    def _resolve_zip_path(path_str):
+    def _resolve_zip_path(path_str: str):
         for match in reversed(list(re.finditer(r'[\\/]', path_str))):
             with contextlib.suppress(
                 FileNotFoundError,
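The _resolve change above makes unresolvable namespace path entries (the editable-install sentinels) resolve to None instead of raising, and filter(bool, ...) then drops them before MultiplexedPath sees them. The shape of that pattern in isolation (names are illustrative; '/tmp' resolves on a POSIX system):

    import pathlib

    def candidate_paths(entry):
        yield pathlib.Path(entry)

    def resolve(entry):
        candidates = (c for c in candidate_paths(entry) if c.is_dir())
        return next(candidates, None)  # None instead of an error when nothing matches

    entries = ['/tmp', '__editable__.sample_namespace-1.0.finder.__path_hook__']
    real_dirs = list(filter(bool, map(resolve, entries)))  # the sentinel is dropped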

@@ -77,7 +77,7 @@ class ResourceHandle(Traversable):
 
     def __init__(self, parent: ResourceContainer, name: str):
         self.parent = parent
-        self.name = name  # type: ignore
+        self.name = name  # type: ignore[misc]
 
     def is_file(self):
         return True
|
|||
import functools
|
||||
|
||||
from typing import Dict, Union
|
||||
from typing import runtime_checkable
|
||||
from typing import Protocol
|
||||
|
||||
|
||||
####
|
||||
# from jaraco.path 3.4.1
|
||||
|
||||
FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']] # type: ignore
|
||||
# from jaraco.path 3.7.1
|
||||
|
||||
|
||||
def build(spec: FilesSpec, prefix=pathlib.Path()):
|
||||
class Symlink(str):
|
||||
"""
|
||||
A string indicating the target of a symlink.
|
||||
"""
|
||||
|
||||
|
||||
FilesSpec = Dict[str, Union[str, bytes, Symlink, 'FilesSpec']]
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class TreeMaker(Protocol):
|
||||
def __truediv__(self, *args, **kwargs): ... # pragma: no cover
|
||||
|
||||
def mkdir(self, **kwargs): ... # pragma: no cover
|
||||
|
||||
def write_text(self, content, **kwargs): ... # pragma: no cover
|
||||
|
||||
def write_bytes(self, content): ... # pragma: no cover
|
||||
|
||||
def symlink_to(self, target): ... # pragma: no cover
|
||||
|
||||
|
||||
def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker:
|
||||
return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) # type: ignore[return-value]
|
||||
|
||||
|
||||
def build(
|
||||
spec: FilesSpec,
|
||||
prefix: Union[str, TreeMaker] = pathlib.Path(), # type: ignore[assignment]
|
||||
):
|
||||
"""
|
||||
Build a set of files/directories, as described by the spec.
|
||||
|
||||
|
@ -25,21 +54,25 @@ def build(spec: FilesSpec, prefix=pathlib.Path()):
|
|||
... "__init__.py": "",
|
||||
... },
|
||||
... "baz.py": "# Some code",
|
||||
... }
|
||||
... "bar.py": Symlink("baz.py"),
|
||||
... },
|
||||
... "bing": Symlink("foo"),
|
||||
... }
|
||||
>>> target = getfixture('tmp_path')
|
||||
>>> build(spec, target)
|
||||
>>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
|
||||
'# Some code'
|
||||
>>> target.joinpath('bing/bar.py').read_text(encoding='utf-8')
|
||||
'# Some code'
|
||||
"""
|
||||
for name, contents in spec.items():
|
||||
create(contents, pathlib.Path(prefix) / name)
|
||||
create(contents, _ensure_tree_maker(prefix) / name)
|
||||
|
||||
|
||||
@functools.singledispatch
|
||||
def create(content: Union[str, bytes, FilesSpec], path):
|
||||
path.mkdir(exist_ok=True)
|
||||
build(content, prefix=path) # type: ignore
|
||||
build(content, prefix=path) # type: ignore[arg-type]
|
||||
|
||||
|
||||
@create.register
|
||||
|
@@ -52,5 +85,10 @@ def _(content: str, path):
     path.write_text(content, encoding='utf-8')
 
 
+@create.register
+def _(content: Symlink, path):
+    path.symlink_to(content)
+
+
 # end from jaraco.path
 ####
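The new Symlink marker lets a file spec declare symlinks alongside regular files, and the new create registration materializes them with path.symlink_to(). Usage mirrors the updated docstring (a sketch assuming a platform where unprivileged symlinks are permitted):

    import pathlib, tempfile
    from importlib_resources.tests._path import build, Symlink

    spec = {
        'foo': {
            'baz.py': '# Some code',
            'bar.py': Symlink('baz.py'),  # file-level link inside 'foo'
        },
        'bing': Symlink('foo'),           # directory-level link
    }
    target = pathlib.Path(tempfile.mkdtemp())
    build(spec, target)
    print(target.joinpath('bing/bar.py').read_text(encoding='utf-8'))  # '# Some code'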

@@ -8,3 +8,6 @@ import_helper = try_import('import_helper') or from_test_support(
     'modules_setup', 'modules_cleanup', 'DirsOnSysPath'
 )
 os_helper = try_import('os_helper') or from_test_support('temp_dir')
+warnings_helper = try_import('warnings_helper') or from_test_support(
+    'ignore_warnings', 'check_warnings'
+)

Binary file not shown.
Binary file not shown.

@@ -1 +0,0 @@
-Hello, UTF-8 world!

@@ -1 +0,0 @@
-one resource

@@ -1 +0,0 @@
-a resource

@@ -1 +0,0 @@
-two resource

Binary file not shown.
Binary file not shown.

@@ -1 +0,0 @@
-Hello, UTF-8 world!

@@ -1,7 +1,6 @@
 import unittest
 import importlib_resources as resources
 
-from . import data01
 from . import util
 
 
@@ -19,16 +18,17 @@ class ContentsTests:
     assert self.expected <= contents
 
 
-class ContentsDiskTests(ContentsTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
+class ContentsDiskTests(ContentsTests, util.DiskSetup, unittest.TestCase):
+    pass
 
 
 class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
     pass
 
 
-class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
+class ContentsNamespaceTests(ContentsTests, util.DiskSetup, unittest.TestCase):
+    MODULE = 'namespacedata01'
+
     expected = {
         # no __init__ because of namespace design
         'binary.file',
@@ -36,8 +36,3 @@ class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
         'utf-16.file',
         'utf-8.file',
     }
-
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01

@@ -1,3 +1,7 @@
+import os
+import pathlib
+import py_compile
+import shutil
 import textwrap
 import unittest
 import warnings
@@ -6,11 +10,8 @@ import contextlib
 
 import importlib_resources as resources
 from ..abc import Traversable
-from . import data01
 from . import util
 from . import _path
-from .compat.py39 import os_helper
-from .compat.py312 import import_helper
+from .compat.py39 import os_helper, import_helper
 
 
 @contextlib.contextmanager
@@ -48,70 +49,146 @@ class FilesTests:
         resources.files(package=self.data)
 
 
-class OpenDiskTests(FilesTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
+class OpenDiskTests(FilesTests, util.DiskSetup, unittest.TestCase):
+    pass
 
 
 class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
     pass
 
 
-class OpenNamespaceTests(FilesTests, unittest.TestCase):
-    def setUp(self):
-        from . import namespacedata01
+class OpenNamespaceTests(FilesTests, util.DiskSetup, unittest.TestCase):
+    MODULE = 'namespacedata01'
 
-        self.data = namespacedata01
+    def test_non_paths_in_dunder_path(self):
+        """
+        Non-path items in a namespace package's ``__path__`` are ignored.
+
+        As reported in python/importlib_resources#311, some tools
+        like Setuptools, when creating editable packages, will inject
+        non-paths into a namespace package's ``__path__``, a
+        sentinel like
+        ``__editable__.sample_namespace-1.0.finder.__path_hook__``
+        to cause the ``PathEntryFinder`` to be called when searching
+        for packages. In that case, resources should still be loadable.
+        """
+        import namespacedata01
+
+        namespacedata01.__path__.append(
+            '__editable__.sample_namespace-1.0.finder.__path_hook__'
+        )
+
+        resources.files(namespacedata01)
 
 
 class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
     ZIP_MODULE = 'namespacedata01'
 
 
-class SiteDir:
-    def setUp(self):
-        self.fixtures = contextlib.ExitStack()
-        self.addCleanup(self.fixtures.close)
-        self.site_dir = self.fixtures.enter_context(os_helper.temp_dir())
-        self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir))
-        self.fixtures.enter_context(import_helper.isolated_modules())
+class DirectSpec:
+    """
+    Override behavior of ModuleSetup to write a full spec directly.
+    """
+
+    MODULE = 'unused'
+
+    def load_fixture(self, name):
+        self.tree_on_path(self.spec)
 
 
-class ModulesFilesTests(SiteDir, unittest.TestCase):
-    def test_module_resources(self):
-        """
-        A module can have resources found adjacent to the module.
-        """
-        spec = {
-            'mod.py': '',
-            'res.txt': 'resources are the best',
-        }
-        _path.build(spec, self.site_dir)
-        import mod
+class ModulesFiles:
+    spec = {
+        'mod.py': '',
+        'res.txt': 'resources are the best',
+    }
+
+    def test_module_resources(self):
+        """
+        A module can have resources found adjacent to the module.
+        """
+        import mod  # type: ignore[import-not-found]
 
         actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8')
-        assert actual == spec['res.txt']
+        assert actual == self.spec['res.txt']
 
 
-class ImplicitContextFilesTests(SiteDir, unittest.TestCase):
-    def test_implicit_files(self):
-        """
-        Without any parameter, files() will infer the location as the caller.
-        """
-        spec = {
-            'somepkg': {
-                '__init__.py': textwrap.dedent(
-                    """
-                    import importlib_resources as res
-                    val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
-                    """
-                ),
-                'res.txt': 'resources are the best',
-            },
-        }
-        _path.build(spec, self.site_dir)
+class ModuleFilesDiskTests(DirectSpec, util.DiskSetup, ModulesFiles, unittest.TestCase):
+    pass
+
+
+class ModuleFilesZipTests(DirectSpec, util.ZipSetup, ModulesFiles, unittest.TestCase):
+    pass
+
+
+class ImplicitContextFiles:
+    set_val = textwrap.dedent(
+        f"""
+        import {resources.__name__} as res
+        val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
+        """
+    )
+    spec = {
+        'somepkg': {
+            '__init__.py': set_val,
+            'submod.py': set_val,
+            'res.txt': 'resources are the best',
+        },
+        'frozenpkg': {
+            '__init__.py': set_val.replace(resources.__name__, 'c_resources'),
+            'res.txt': 'resources are the best',
+        },
+    }
+
+    def test_implicit_files_package(self):
+        """
+        Without any parameter, files() will infer the location as the caller.
+        """
+        assert importlib.import_module('somepkg').val == 'resources are the best'
+
+    def test_implicit_files_submodule(self):
+        """
+        Without any parameter, files() will infer the location as the caller.
+        """
+        assert importlib.import_module('somepkg.submod').val == 'resources are the best'
+
+    def _compile_importlib(self):
+        """
+        Make a compiled-only copy of the importlib resources package.
+        """
+        bin_site = self.fixtures.enter_context(os_helper.temp_dir())
+        c_resources = pathlib.Path(bin_site, 'c_resources')
+        sources = pathlib.Path(resources.__file__).parent
+        shutil.copytree(sources, c_resources, ignore=lambda *_: ['__pycache__'])
+
+        for dirpath, _, filenames in os.walk(c_resources):
+            for filename in filenames:
+                source_path = pathlib.Path(dirpath) / filename
+                cfile = source_path.with_suffix('.pyc')
+                py_compile.compile(source_path, cfile)
+                pathlib.Path.unlink(source_path)
+        self.fixtures.enter_context(import_helper.DirsOnSysPath(bin_site))
+
+    def test_implicit_files_with_compiled_importlib(self):
+        """
+        Caller detection works for compiled-only resources module.
+
+        python/cpython#123085
+        """
+        self._compile_importlib()
+        assert importlib.import_module('frozenpkg').val == 'resources are the best'
+
+
+class ImplicitContextFilesDiskTests(
+    DirectSpec, util.DiskSetup, ImplicitContextFiles, unittest.TestCase
+):
+    pass
+
+
+class ImplicitContextFilesZipTests(
+    DirectSpec, util.ZipSetup, ImplicitContextFiles, unittest.TestCase
+):
+    pass
 
 
 if __name__ == '__main__':
     unittest.main()
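The _compile_importlib helper above exercises a compiled-only install: every .py source is byte-compiled in place and then deleted, so imports must resolve through the .pyc files alone. A standalone sketch of that setup (paths and the module name are illustrative):

    import pathlib, py_compile, sys, tempfile

    site = pathlib.Path(tempfile.mkdtemp())
    src = site / 'mymod.py'
    src.write_text("val = 'hello'", encoding='utf-8')

    py_compile.compile(str(src), str(src.with_suffix('.pyc')))  # mymod.pyc beside the source
    src.unlink()                                                # leave only the bytecode

    sys.path.insert(0, str(site))
    import mymod
    print(mymod.val)  # 'hello', imported from the sourceless .pyc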

@@ -1,31 +1,38 @@
 import unittest
 import os
 import contextlib
+import importlib
 
-try:
-    from test.support.warnings_helper import ignore_warnings, check_warnings
-except ImportError:
-    # older Python versions
-    from test.support import ignore_warnings, check_warnings
+from .compat.py39 import warnings_helper
 
 import importlib_resources as resources
 
+from . import util
+
 # Since the functional API forwards to Traversable, we only test
 # filesystem resources here -- not zip files, namespace packages etc.
 # We do test for two kinds of Anchor, though.
 
 
 class StringAnchorMixin:
-    anchor01 = 'importlib_resources.tests.data01'
-    anchor02 = 'importlib_resources.tests.data02'
+    anchor01 = 'data01'
+    anchor02 = 'data02'
 
 
 class ModuleAnchorMixin:
-    from . import data01 as anchor01
-    from . import data02 as anchor02
+    @property
+    def anchor01(self):
+        return importlib.import_module('data01')
+
+    @property
+    def anchor02(self):
+        return importlib.import_module('data02')
 
 
-class FunctionalAPIBase:
+class FunctionalAPIBase(util.DiskSetup):
+    def setUp(self):
+        super().setUp()
+        self.load_fixture('data02')
+
     def _gen_resourcetxt_path_parts(self):
         """Yield various names of a text file in anchor02, each in a subTest"""
         for path_parts in (
@@ -36,6 +43,12 @@ class FunctionalAPIBase:
             with self.subTest(path_parts=path_parts):
                 yield path_parts
 
+    def assertEndsWith(self, string, suffix):
+        """Assert that `string` ends with `suffix`.
+
+        Used to ignore an architecture-specific UTF-16 byte-order mark."""
+        self.assertEqual(string[-len(suffix) :], suffix)
+
     def test_read_text(self):
         self.assertEqual(
             resources.read_text(self.anchor01, 'utf-8.file'),
@@ -76,13 +89,13 @@ class FunctionalAPIBase:
             ),
             '\x00\x01\x02\x03',
         )
-        self.assertEqual(
+        self.assertEndsWith(  # ignore the BOM
             resources.read_text(
                 self.anchor01,
                 'utf-16.file',
                 errors='backslashreplace',
             ),
-            'Hello, UTF-16 world!\n'.encode('utf-16').decode(
+            'Hello, UTF-16 world!\n'.encode('utf-16-le').decode(
                 errors='backslashreplace',
             ),
         )
@@ -128,9 +141,9 @@ class FunctionalAPIBase:
             'utf-16.file',
             errors='backslashreplace',
         ) as f:
-            self.assertEqual(
+            self.assertEndsWith(  # ignore the BOM
                 f.read(),
-                'Hello, UTF-16 world!\n'.encode('utf-16').decode(
+                'Hello, UTF-16 world!\n'.encode('utf-16-le').decode(
                     errors='backslashreplace',
                 ),
             )
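The utf-16 to utf-16-le switch in the two hunks above matters because Python's generic 'utf-16' codec prepends a byte-order mark on encode, while the endianness-explicit codecs do not, so the expected string would otherwise carry a platform-dependent prefix:

    >>> 'hi'.encode('utf-16')     # BOM first (b'\xff\xfe' on little-endian machines)
    b'\xff\xfeh\x00i\x00'
    >>> 'hi'.encode('utf-16-le')  # no BOM; bytes match the file payload
    b'h\x00i\x00'

Pairing that with the new assertEndsWith keeps the comparison immune to whatever BOM the file on disk starts with.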
@@ -163,32 +176,32 @@ class FunctionalAPIBase:
             self.assertTrue(is_resource(self.anchor02, *path_parts))
 
     def test_contents(self):
-        with check_warnings((".*contents.*", DeprecationWarning)):
+        with warnings_helper.check_warnings((".*contents.*", DeprecationWarning)):
             c = resources.contents(self.anchor01)
         self.assertGreaterEqual(
             set(c),
             {'utf-8.file', 'utf-16.file', 'binary.file', 'subdirectory'},
         )
-        with contextlib.ExitStack() as cm:
-            cm.enter_context(self.assertRaises(OSError))
-            cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
-
+        with self.assertRaises(OSError), warnings_helper.check_warnings((
+            ".*contents.*",
+            DeprecationWarning,
+        )):
             list(resources.contents(self.anchor01, 'utf-8.file'))
 
         for path_parts in self._gen_resourcetxt_path_parts():
-            with contextlib.ExitStack() as cm:
-                cm.enter_context(self.assertRaises(OSError))
-                cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
-
+            with self.assertRaises(OSError), warnings_helper.check_warnings((
+                ".*contents.*",
+                DeprecationWarning,
+            )):
                 list(resources.contents(self.anchor01, *path_parts))
-        with check_warnings((".*contents.*", DeprecationWarning)):
+        with warnings_helper.check_warnings((".*contents.*", DeprecationWarning)):
            c = resources.contents(self.anchor01, 'subdirectory')
        self.assertGreaterEqual(
            set(c),
            {'binary.file'},
        )

-    @ignore_warnings(category=DeprecationWarning)
+    @warnings_helper.ignore_warnings(category=DeprecationWarning)
     def test_common_errors(self):
         for func in (
             resources.read_text,
@@ -227,16 +240,16 @@ class FunctionalAPIBase:
 
 
 class FunctionalAPITest_StringAnchor(
-    unittest.TestCase,
-    FunctionalAPIBase,
     StringAnchorMixin,
+    FunctionalAPIBase,
+    unittest.TestCase,
 ):
     pass
 
 
 class FunctionalAPITest_ModuleAnchor(
-    unittest.TestCase,
-    FunctionalAPIBase,
     ModuleAnchorMixin,
+    FunctionalAPIBase,
+    unittest.TestCase,
 ):
     pass

@@ -1,7 +1,6 @@
 import unittest
 
 import importlib_resources as resources
-from . import data01
 from . import util
 
 
@@ -65,16 +64,12 @@ class OpenTests:
             target.open(encoding='utf-8')
 
 
-class OpenDiskTests(OpenTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
+class OpenDiskTests(OpenTests, util.DiskSetup, unittest.TestCase):
+    pass
 
 
-class OpenDiskNamespaceTests(OpenTests, unittest.TestCase):
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
+class OpenDiskNamespaceTests(OpenTests, util.DiskSetup, unittest.TestCase):
+    MODULE = 'namespacedata01'
 
 
 class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
@@ -82,7 +77,7 @@ class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
 
 
 class OpenNamespaceZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
-    ZIP_MODULE = 'namespacedata01'
+    MODULE = 'namespacedata01'
 
 
 if __name__ == '__main__':

@@ -3,7 +3,6 @@ import pathlib
 import unittest
 
 import importlib_resources as resources
-from . import data01
 from . import util
 
 
@@ -25,9 +24,7 @@ class PathTests:
         self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8'))
 
 
-class PathDiskTests(PathTests, unittest.TestCase):
-    data = data01
-
+class PathDiskTests(PathTests, util.DiskSetup, unittest.TestCase):
     def test_natural_path(self):
         """
         Guarantee the internal implementation detail that

@@ -1,7 +1,6 @@
 import unittest
 import importlib_resources as resources
 
-from . import data01
 from . import util
 from importlib import import_module
 
@@ -52,8 +51,8 @@ class ReadTests:
     )
 
 
-class ReadDiskTests(ReadTests, unittest.TestCase):
-    data = data01
+class ReadDiskTests(ReadTests, util.DiskSetup, unittest.TestCase):
+    pass
 
 
 class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
@@ -69,15 +68,12 @@ class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
         self.assertEqual(result, bytes(range(4, 8)))
 
 
-class ReadNamespaceTests(ReadTests, unittest.TestCase):
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
+class ReadNamespaceTests(ReadTests, util.DiskSetup, unittest.TestCase):
+    MODULE = 'namespacedata01'
 
 
 class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
-    ZIP_MODULE = 'namespacedata01'
+    MODULE = 'namespacedata01'
 
     def test_read_submodule_resource(self):
         submodule = import_module('namespacedata01.subdirectory')

@@ -1,16 +1,21 @@
 import os.path
-import sys
 import pathlib
 import unittest
 
 from importlib import import_module
 from importlib_resources.readers import MultiplexedPath, NamespaceReader
 
+from . import util
 
-class MultiplexedPathTest(unittest.TestCase):
-    @classmethod
-    def setUpClass(cls):
-        cls.folder = pathlib.Path(__file__).parent / 'namespacedata01'
+
+class MultiplexedPathTest(util.DiskSetup, unittest.TestCase):
+    MODULE = 'namespacedata01'
+
+    def setUp(self):
+        super().setUp()
+        self.folder = pathlib.Path(self.data.__path__[0])
+        self.data01 = pathlib.Path(self.load_fixture('data01').__file__).parent
+        self.data02 = pathlib.Path(self.load_fixture('data02').__file__).parent
 
     def test_init_no_paths(self):
         with self.assertRaises(FileNotFoundError):
@@ -31,9 +36,8 @@ class MultiplexedPathTest(unittest.TestCase):
         )
 
     def test_iterdir_duplicate(self):
-        data01 = pathlib.Path(__file__).parent.joinpath('data01')
         contents = {
-            path.name for path in MultiplexedPath(self.folder, data01).iterdir()
+            path.name for path in MultiplexedPath(self.folder, self.data01).iterdir()
         }
         for remove in ('__pycache__', '__init__.pyc'):
             try:
@@ -61,9 +65,8 @@ class MultiplexedPathTest(unittest.TestCase):
             path.open()
 
     def test_join_path(self):
-        data01 = pathlib.Path(__file__).parent.joinpath('data01')
-        prefix = str(data01.parent)
-        path = MultiplexedPath(self.folder, data01)
+        prefix = str(self.folder.parent)
+        path = MultiplexedPath(self.folder, self.data01)
         self.assertEqual(
             str(path.joinpath('binary.file'))[len(prefix) + 1 :],
             os.path.join('namespacedata01', 'binary.file'),
@@ -83,10 +86,8 @@ class MultiplexedPathTest(unittest.TestCase):
         assert not path.joinpath('imaginary/foo.py').exists()
 
     def test_join_path_common_subdir(self):
-        data01 = pathlib.Path(__file__).parent.joinpath('data01')
-        data02 = pathlib.Path(__file__).parent.joinpath('data02')
-        prefix = str(data01.parent)
-        path = MultiplexedPath(data01, data02)
+        prefix = str(self.data02.parent)
+        path = MultiplexedPath(self.data01, self.data02)
         self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath)
         self.assertEqual(
             str(path.joinpath('subdirectory', 'subsubdir'))[len(prefix) + 1 :],
@@ -106,16 +107,8 @@ class MultiplexedPathTest(unittest.TestCase):
         )
 
 
-class NamespaceReaderTest(unittest.TestCase):
-    site_dir = str(pathlib.Path(__file__).parent)
-
-    @classmethod
-    def setUpClass(cls):
-        sys.path.append(cls.site_dir)
-
-    @classmethod
-    def tearDownClass(cls):
-        sys.path.remove(cls.site_dir)
+class NamespaceReaderTest(util.DiskSetup, unittest.TestCase):
+    MODULE = 'namespacedata01'
 
     def test_init_error(self):
         with self.assertRaises(ValueError):
@@ -125,7 +118,7 @@ class NamespaceReaderTest(unittest.TestCase):
         namespacedata01 = import_module('namespacedata01')
         reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
 
-        root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
+        root = self.data.__path__[0]
         self.assertEqual(
             reader.resource_path('binary.file'), os.path.join(root, 'binary.file')
         )
@@ -134,9 +127,8 @@ class NamespaceReaderTest(unittest.TestCase):
         )
 
     def test_files(self):
-        namespacedata01 = import_module('namespacedata01')
-        reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
-        root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
+        reader = NamespaceReader(self.data.__spec__.submodule_search_locations)
+        root = self.data.__path__[0]
         self.assertIsInstance(reader.files(), MultiplexedPath)
         self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')")
@@ -1,9 +1,6 @@
import sys
import unittest
import importlib_resources as resources
import pathlib

from . import data01
from . import util
from importlib import import_module

@@ -25,9 +22,8 @@ class ResourceTests:
        self.assertTrue(target.is_dir())


class ResourceDiskTests(ResourceTests, unittest.TestCase):
    def setUp(self):
        self.data = data01
class ResourceDiskTests(ResourceTests, util.DiskSetup, unittest.TestCase):
    pass


class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase):

@@ -38,33 +34,39 @@ def names(traversable):
    return {item.name for item in traversable.iterdir()}


class ResourceLoaderTests(unittest.TestCase):
class ResourceLoaderTests(util.DiskSetup, unittest.TestCase):
    def test_resource_contents(self):
        package = util.create_package(
            file=data01, path=data01.__file__, contents=['A', 'B', 'C']
            file=self.data, path=self.data.__file__, contents=['A', 'B', 'C']
        )
        self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'})

    def test_is_file(self):
        package = util.create_package(
            file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
            file=self.data,
            path=self.data.__file__,
            contents=['A', 'B', 'C', 'D/E', 'D/F'],
        )
        self.assertTrue(resources.files(package).joinpath('B').is_file())

    def test_is_dir(self):
        package = util.create_package(
            file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
            file=self.data,
            path=self.data.__file__,
            contents=['A', 'B', 'C', 'D/E', 'D/F'],
        )
        self.assertTrue(resources.files(package).joinpath('D').is_dir())

    def test_resource_missing(self):
        package = util.create_package(
            file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
            file=self.data,
            path=self.data.__file__,
            contents=['A', 'B', 'C', 'D/E', 'D/F'],
        )
        self.assertFalse(resources.files(package).joinpath('Z').is_file())


class ResourceCornerCaseTests(unittest.TestCase):
class ResourceCornerCaseTests(util.DiskSetup, unittest.TestCase):
    def test_package_has_no_reader_fallback(self):
        """
        Test odd ball packages which:

@@ -73,7 +75,7 @@ class ResourceCornerCaseTests(unittest.TestCase):
        # 3. Are not in a zip file
        """
        module = util.create_package(
            file=data01, path=data01.__file__, contents=['A', 'B', 'C']
            file=self.data, path=self.data.__file__, contents=['A', 'B', 'C']
        )
        # Give the module a dummy loader.
        module.__loader__ = object()

@@ -84,9 +86,7 @@ class ResourceCornerCaseTests(unittest.TestCase):
        self.assertFalse(resources.files(module).joinpath('A').is_file())


class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
    ZIP_MODULE = 'data01'

class ResourceFromZipsTest01(util.ZipSetup, unittest.TestCase):
    def test_is_submodule_resource(self):
        submodule = import_module('data01.subdirectory')
        self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())

@@ -117,8 +117,8 @@ class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
        assert not data.parent.exists()


class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
    ZIP_MODULE = 'data02'
class ResourceFromZipsTest02(util.ZipSetup, unittest.TestCase):
    MODULE = 'data02'

    def test_unrelated_contents(self):
        """

@@ -135,7 +135,7 @@ class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
        )


class DeletingZipsTest(util.ZipSetupBase, unittest.TestCase):
class DeletingZipsTest(util.ZipSetup, unittest.TestCase):
    """Having accessed resources in a zip file should not keep an open
    reference to the zip.
    """

@@ -217,24 +217,20 @@ class ResourceFromNamespaceTests:
        self.assertEqual(contents, {'binary.file'})


class ResourceFromNamespaceDiskTests(ResourceFromNamespaceTests, unittest.TestCase):
    site_dir = str(pathlib.Path(__file__).parent)

    @classmethod
    def setUpClass(cls):
        sys.path.append(cls.site_dir)

    @classmethod
    def tearDownClass(cls):
        sys.path.remove(cls.site_dir)


class ResourceFromNamespaceZipTests(
    util.ZipSetupBase,
class ResourceFromNamespaceDiskTests(
    util.DiskSetup,
    ResourceFromNamespaceTests,
    unittest.TestCase,
):
    ZIP_MODULE = 'namespacedata01'
    MODULE = 'namespacedata01'


class ResourceFromNamespaceZipTests(
    util.ZipSetup,
    ResourceFromNamespaceTests,
    unittest.TestCase,
):
    MODULE = 'namespacedata01'


if __name__ == '__main__':

@@ -6,10 +6,10 @@ import types
import pathlib
import contextlib

from . import data01
from ..abc import ResourceReader
from .compat.py39 import import_helper, os_helper
from . import zip as zip_
from . import _path


from importlib.machinery import ModuleSpec

@@ -68,7 +68,7 @@ def create_package(file=None, path=None, is_package=True, contents=()):
    )


class CommonTests(metaclass=abc.ABCMeta):
class CommonTestsBase(metaclass=abc.ABCMeta):
    """
    Tests shared by test_open, test_path, and test_read.
    """

@@ -84,34 +84,34 @@ class CommonTests(metaclass=abc.ABCMeta):
        """
        Passing in the package name should succeed.
        """
        self.execute(data01.__name__, 'utf-8.file')
        self.execute(self.data.__name__, 'utf-8.file')

    def test_package_object(self):
        """
        Passing in the package itself should succeed.
        """
        self.execute(data01, 'utf-8.file')
        self.execute(self.data, 'utf-8.file')

    def test_string_path(self):
        """
        Passing in a string for the path should succeed.
        """
        path = 'utf-8.file'
        self.execute(data01, path)
        self.execute(self.data, path)

    def test_pathlib_path(self):
        """
        Passing in a pathlib.PurePath object for the path should succeed.
        """
        path = pathlib.PurePath('utf-8.file')
        self.execute(data01, path)
        self.execute(self.data, path)

    def test_importing_module_as_side_effect(self):
        """
        The anchor package can already be imported.
        """
        del sys.modules[data01.__name__]
        self.execute(data01.__name__, 'utf-8.file')
        del sys.modules[self.data.__name__]
        self.execute(self.data.__name__, 'utf-8.file')

    def test_missing_path(self):
        """

@@ -141,24 +141,66 @@ class CommonTests(metaclass=abc.ABCMeta):
        self.execute(package, 'utf-8.file')


class ZipSetupBase:
    ZIP_MODULE = 'data01'
fixtures = dict(
    data01={
        '__init__.py': '',
        'binary.file': bytes(range(4)),
        'utf-16.file': 'Hello, UTF-16 world!\n'.encode('utf-16'),
        'utf-8.file': 'Hello, UTF-8 world!\n'.encode('utf-8'),
        'subdirectory': {
            '__init__.py': '',
            'binary.file': bytes(range(4, 8)),
        },
    },
    data02={
        '__init__.py': '',
        'one': {'__init__.py': '', 'resource1.txt': 'one resource'},
        'two': {'__init__.py': '', 'resource2.txt': 'two resource'},
        'subdirectory': {'subsubdir': {'resource.txt': 'a resource'}},
    },
    namespacedata01={
        'binary.file': bytes(range(4)),
        'utf-16.file': 'Hello, UTF-16 world!\n'.encode('utf-16'),
        'utf-8.file': 'Hello, UTF-8 world!\n'.encode('utf-8'),
        'subdirectory': {
            'binary.file': bytes(range(12, 16)),
        },
    },
)


class ModuleSetup:
    def setUp(self):
        self.fixtures = contextlib.ExitStack()
        self.addCleanup(self.fixtures.close)

        self.fixtures.enter_context(import_helper.isolated_modules())
        self.data = self.load_fixture(self.MODULE)

    def load_fixture(self, module):
        self.tree_on_path({module: fixtures[module]})
        return importlib.import_module(module)


class ZipSetup(ModuleSetup):
    MODULE = 'data01'

    def tree_on_path(self, spec):
        temp_dir = self.fixtures.enter_context(os_helper.temp_dir())
        modules = pathlib.Path(temp_dir) / 'zipped modules.zip'
        src_path = pathlib.Path(__file__).parent.joinpath(self.ZIP_MODULE)
        self.fixtures.enter_context(
            import_helper.DirsOnSysPath(str(zip_.make_zip_file(src_path, modules)))
            import_helper.DirsOnSysPath(str(zip_.make_zip_file(spec, modules)))
        )

        self.data = importlib.import_module(self.ZIP_MODULE)


class DiskSetup(ModuleSetup):
    MODULE = 'data01'

    def tree_on_path(self, spec):
        temp_dir = self.fixtures.enter_context(os_helper.temp_dir())
        _path.build(spec, pathlib.Path(temp_dir))
        self.fixtures.enter_context(import_helper.DirsOnSysPath(temp_dir))


class ZipSetup(ZipSetupBase):
class CommonTests(DiskSetup, CommonTestsBase):
    pass

@@ -2,31 +2,25 @@
Generate zip test data files.
"""

import contextlib
import os
import pathlib
import zipfile

import zipp


def make_zip_file(src, dst):
def make_zip_file(tree, dst):
    """
    Zip the files in src into a new zipfile at dst.
    Zip the files in tree into a new zipfile at dst.
    """
    with zipfile.ZipFile(dst, 'w') as zf:
        for src_path, rel in walk(src):
            dst_name = src.name / pathlib.PurePosixPath(rel.as_posix())
            zf.write(src_path, dst_name)
        for name, contents in walk(tree):
            zf.writestr(name, contents)
        zipp.CompleteDirs.inject(zf)
    return dst


def walk(datapath):
    for dirpath, dirnames, filenames in os.walk(datapath):
        with contextlib.suppress(ValueError):
            dirnames.remove('__pycache__')
        for filename in filenames:
            res = pathlib.Path(dirpath) / filename
            rel = res.relative_to(datapath)
            yield res, rel
def walk(tree, prefix=''):
    for name, contents in tree.items():
        if isinstance(contents, dict):
            yield from walk(contents, prefix=f'{prefix}{name}/')
        else:
            yield f'{prefix}{name}', contents

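The rewritten zip helper above no longer walks a source directory; it flattens the in-memory fixture dicts defined in util.py into (archive name, contents) pairs. A minimal sketch of that behaviour, using a trimmed-down fixture tree (stdlib only; the tree below is illustrative, not the full fixture):

    import zipfile

    def walk(tree, prefix=''):
        # Flatten a nested dict into (archive_name, file_contents) pairs.
        for name, contents in tree.items():
            if isinstance(contents, dict):
                yield from walk(contents, prefix=f'{prefix}{name}/')
            else:
                yield f'{prefix}{name}', contents

    tree = {'data01': {'__init__.py': '', 'subdirectory': {'binary.file': bytes(range(4))}}}
    print(list(walk(tree)))
    # [('data01/__init__.py', ''), ('data01/subdirectory/binary.file', b'\x00\x01\x02\x03')]

    # make_zip_file() then simply writestr()s each pair into the archive:
    with zipfile.ZipFile('fixture.zip', 'w') as zf:
        for name, contents in walk(tree):
            zf.writestr(name, contents)
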
@@ -193,6 +193,7 @@ class Artist(
            similar (List<:class:`~plexapi.media.Similar`>): List of similar objects.
            styles (List<:class:`~plexapi.media.Style`>): List of style objects.
            theme (str): URL to theme resource (/library/metadata/<ratingkey>/theme/<themeid>).
            ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
    """
    TAG = 'Directory'
    TYPE = 'artist'

@@ -213,6 +214,7 @@ class Artist(
        self.similar = self.findItems(data, media.Similar)
        self.styles = self.findItems(data, media.Style)
        self.theme = data.attrib.get('theme')
        self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)

    def __iter__(self):
        for album in self.albums():

@@ -281,6 +283,21 @@ class Artist(
            filepaths += track.download(_savepath, keep_original_name, **kwargs)
        return filepaths

    def popularTracks(self):
        """ Returns a list of :class:`~plexapi.audio.Track` popular tracks by the artist. """
        filters = {
            'album.subformat!': 'Compilation,Live',
            'artist.id': self.ratingKey,
            'group': 'title',
            'ratingCount>>': 0,
        }
        return self.section().search(
            libtype='track',
            filters=filters,
            sort='ratingCount:desc',
            limit=100
        )

    def station(self):
        """ Returns a :class:`~plexapi.playlist.Playlist` artist radio station or `None`. """
        key = f'{self.key}?includeStations=1'

@@ -325,6 +342,7 @@ class Album(
            studio (str): Studio that released the album.
            styles (List<:class:`~plexapi.media.Style`>): List of style objects.
            subformats (List<:class:`~plexapi.media.Subformat`>): List of subformat objects.
            ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
            viewedLeafCount (int): Number of items marked as played in the album view.
            year (int): Year the album was released.
    """

@@ -354,6 +372,7 @@ class Album(
        self.studio = data.attrib.get('studio')
        self.styles = self.findItems(data, media.Style)
        self.subformats = self.findItems(data, media.Subformat)
        self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
        self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount'))
        self.year = utils.cast(int, data.attrib.get('year'))

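The new Artist.popularTracks() shown above is a convenience wrapper over the section search with a ratingCount sort. A hedged usage sketch (server URL, token, and library/artist names are placeholders):

    from plexapi.server import PlexServer

    plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')  # placeholders
    artist = plex.library.section('Music').get('Some Artist')  # hypothetical section/artist
    for track in artist.popularTracks():
        print(track.title, track.ratingCount)
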
@@ -3,7 +3,7 @@ import re
from typing import TYPE_CHECKING, Generic, Iterable, List, Optional, TypeVar, Union
import weakref
from functools import cached_property
from urllib.parse import urlencode
from urllib.parse import parse_qsl, urlencode, urlparse
from xml.etree import ElementTree
from xml.etree.ElementTree import Element

@@ -391,10 +391,9 @@ class PlexObject:

            Parameters:
                key (string, optional): Override the key to reload.
                **kwargs (dict): A dictionary of XML include parameters to exclude or override.
                    All parameters are included by default with the option to override each parameter
                    or disable each parameter individually by setting it to False or 0.
                **kwargs (dict): A dictionary of XML include parameters to include/exclude or override.
                    See :class:`~plexapi.base.PlexPartialObject` for all the available include parameters.
                    Set parameter to True to include and False to exclude.

            Example:

@@ -402,20 +401,28 @@ class PlexObject:

                from plexapi.server import PlexServer
                plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')
                movie = plex.library.section('Movies').get('Cars')

                # Partial reload of the movie without the `checkFiles` parameter.
                # Excluding `checkFiles` will prevent the Plex server from reading the
                # file to check if the file still exists and is accessible.
                # The movie object will remain as a partial object.
                movie.reload(checkFiles=False)
                # Search results are partial objects.
                movie = plex.library.section('Movies').get('Cars')
                movie.isPartialObject()  # Returns True

                # Full reload of the movie with all include parameters.
                # Partial reload of the movie without a default include parameter.
                # The movie object will remain as a partial object.
                movie.reload(includeMarkers=False)
                movie.isPartialObject()  # Returns True

                # Full reload of the movie with all default include parameters.
                # The movie object will be a full object.
                movie.reload()
                movie.isFullObject()  # Returns True

                # Full reload of the movie with all default and extra include parameter.
                # Including `checkFiles` will tell the Plex server to check if the file
                # still exists and is accessible.
                # The movie object will be a full object.
                movie.reload(checkFiles=True)
                movie.isFullObject()  # Returns True

        """
        return self._reload(key=key, **kwargs)

@@ -505,25 +512,25 @@ class PlexPartialObject(PlexObject):
        automatically and update itself.
    """
    _INCLUDES = {
        'checkFiles': 1,
        'includeAllConcerts': 1,
        'checkFiles': 0,
        'includeAllConcerts': 0,
        'includeBandwidths': 1,
        'includeChapters': 1,
        'includeChildren': 1,
        'includeConcerts': 1,
        'includeExternalMedia': 1,
        'includeExtras': 1,
        'includeChildren': 0,
        'includeConcerts': 0,
        'includeExternalMedia': 0,
        'includeExtras': 0,
        'includeFields': 'thumbBlurHash,artBlurHash',
        'includeGeolocation': 1,
        'includeLoudnessRamps': 1,
        'includeMarkers': 1,
        'includeOnDeck': 1,
        'includePopularLeaves': 1,
        'includePreferences': 1,
        'includeRelated': 1,
        'includeRelatedCount': 1,
        'includeReviews': 1,
        'includeStations': 1,
        'includeOnDeck': 0,
        'includePopularLeaves': 0,
        'includePreferences': 0,
        'includeRelated': 0,
        'includeRelatedCount': 0,
        'includeReviews': 0,
        'includeStations': 0,
    }
    _EXCLUDES = {
        'excludeElements': (

@@ -592,7 +599,11 @@ class PlexPartialObject(PlexObject):
            search result for a movie often only contain a portion of the attributes a full
            object (main url) for that movie would contain.
        """
        return not self.key or (self._details_key or self.key) == self._initpath
        parsed_key = urlparse(self._details_key or self.key)
        parsed_initpath = urlparse(self._initpath)
        query_key = set(parse_qsl(parsed_key.query))
        query_init = set(parse_qsl(parsed_initpath.query))
        return not self.key or (parsed_key.path == parsed_initpath.path and query_key <= query_init)

    def isPartialObject(self):
        """ Returns True if this is not a full object. """

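The reworked isFullObject() stops requiring an exact key/initpath string match: it compares the URL path and treats the query string as a set of include parameters, so an object loaded with a superset of the detail parameters still counts as full. A standalone sketch of that comparison (stdlib only; plexapi wires self._details_key and self._initpath into it):

    from urllib.parse import parse_qsl, urlparse

    def is_full(details_key, initpath):
        # Full when paths match and every parameter requested by details_key
        # is already present in the path the object was loaded from.
        key, init = urlparse(details_key), urlparse(initpath)
        return key.path == init.path and set(parse_qsl(key.query)) <= set(parse_qsl(init.query))

    print(is_full('/library/metadata/1?includeMarkers=1',
                  '/library/metadata/1?includeMarkers=1&checkFiles=1'))  # True
    print(is_full('/library/metadata/1?includeMarkers=1',
                  '/library/sections/2/all'))                            # False
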
@@ -197,7 +197,7 @@ class PlexClient(PlexObject):
                raise NotFound(message)
            else:
                raise BadRequest(message)
        data = response.text.encode('utf8')
        data = utils.cleanXMLString(response.text).encode('utf8')
        return ElementTree.fromstring(data) if data.strip() else None

    def sendCommand(self, command, proxy=None, **params):

@@ -60,6 +60,7 @@ class Collection(
            title (str): Name of the collection.
            titleSort (str): Title to use when sorting (defaults to title).
            type (str): 'collection'
            ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
            updatedAt (datetime): Datetime the collection was updated.
            userRating (float): Rating of the collection (0.0 - 10.0) equaling (0 stars - 5 stars).
    """

@@ -102,6 +103,7 @@ class Collection(
        self.title = data.attrib.get('title')
        self.titleSort = data.attrib.get('titleSort', self.title)
        self.type = data.attrib.get('type')
        self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
        self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
        self.userRating = utils.cast(float, data.attrib.get('userRating'))
        self._items = None  # cache for self.items

@@ -4,6 +4,6 @@

# Library version
MAJOR_VERSION = 4
MINOR_VERSION = 15
PATCH_VERSION = 15
PATCH_VERSION = 16
__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"

@@ -2823,7 +2823,8 @@ class FilteringType(PlexObject):
            additionalFields.extend([
                ('duration', 'integer', 'Duration'),
                ('viewOffset', 'integer', 'View Offset'),
                ('label', 'tag', 'Label')
                ('label', 'tag', 'Label'),
                ('ratingCount', 'integer', 'Rating Count'),
            ])
        elif self.type == 'collection':
            additionalFields.extend([

@@ -106,12 +106,16 @@ class MediaPart(PlexObject):
        Attributes:
            TAG (str): 'Part'
            accessible (bool): True if the file is accessible.
                Requires reloading the media with ``checkFiles=True``.
                Refer to :func:`~plexapi.base.PlexObject.reload`.
            audioProfile (str): The audio profile of the file.
            container (str): The container type of the file (ex: avi).
            decision (str): Unknown.
            deepAnalysisVersion (int): The Plex deep analysis version for the file.
            duration (int): The duration of the file in milliseconds.
            exists (bool): True if the file exists.
                Requires reloading the media with ``checkFiles=True``.
                Refer to :func:`~plexapi.base.PlexObject.reload`.
            file (str): The path to this file on disk (ex: /media/Movies/Cars (2006)/Cars (2006).mkv)
            has64bitOffsets (bool): True if the file has 64 bit offsets.
            hasThumbnail (bool): True if the file (track) has an embedded thumbnail.

@@ -999,6 +1003,28 @@ class Review(PlexObject):
        self.text = data.attrib.get('text')


@utils.registerPlexObject
class UltraBlurColors(PlexObject):
    """ Represents a single UltraBlurColors media tag.

        Attributes:
            TAG (str): 'UltraBlurColors'
            bottomLeft (str): The bottom left hex color.
            bottomRight (str): The bottom right hex color.
            topLeft (str): The top left hex color.
            topRight (str): The top right hex color.
    """
    TAG = 'UltraBlurColors'

    def _loadData(self, data):
        """ Load attribute values from Plex XML response. """
        self._data = data
        self.bottomLeft = data.attrib.get('bottomLeft')
        self.bottomRight = data.attrib.get('bottomRight')
        self.topLeft = data.attrib.get('topLeft')
        self.topRight = data.attrib.get('topRight')


class BaseResource(PlexObject):
    """ Base class for all Art, Poster, and Theme objects.

@@ -14,8 +14,8 @@ class AdvancedSettingsMixin:

    def preferences(self):
        """ Returns a list of :class:`~plexapi.settings.Preferences` objects. """
        data = self._server.query(self._details_key)
        return self.findItems(data, settings.Preferences, rtag='Preferences')
        key = f'{self.key}?includePreferences=1'
        return self.fetchItems(key, cls=settings.Preferences, rtag='Preferences')

    def preference(self, pref):
        """ Returns a :class:`~plexapi.settings.Preferences` object for the specified pref.

@@ -240,8 +240,7 @@ class UnmatchMatchMixin:
            params['agent'] = utils.getAgentIdentifier(self.section(), agent)

        key = key + '?' + urlencode(params)
        data = self._server.query(key, method=self._server._session.get)
        return self.findItems(data, initpath=key)
        return self.fetchItems(key, cls=media.SearchResult)

    def fixMatch(self, searchResult=None, auto=False, agent=None):
        """ Use match result to update show metadata.

@@ -278,8 +277,8 @@ class ExtrasMixin:
    def extras(self):
        """ Returns a list of :class:`~plexapi.video.Extra` objects. """
        from plexapi.video import Extra
        data = self._server.query(self._details_key)
        return self.findItems(data, Extra, rtag='Extras')
        key = f'{self.key}/extras'
        return self.fetchItems(key, cls=Extra)


class HubsMixin:

@@ -289,8 +288,7 @@ class HubsMixin:
        """ Returns a list of :class:`~plexapi.library.Hub` objects. """
        from plexapi.library import Hub
        key = f'{self.key}/related'
        data = self._server.query(key)
        return self.findItems(data, Hub)
        return self.fetchItems(key, cls=Hub)


class PlayedUnplayedMixin:

@@ -250,7 +250,7 @@ class MyPlexAccount(PlexObject):
            return response.json()
        elif 'text/plain' in response.headers.get('Content-Type', ''):
            return response.text.strip()
        data = response.text.encode('utf8')
        data = utils.cleanXMLString(response.text).encode('utf8')
        return ElementTree.fromstring(data) if data.strip() else None

    def ping(self):

@@ -768,7 +768,7 @@ class PlexServer(PlexObject):
            raise NotFound(message)
        else:
            raise BadRequest(message)
        data = response.text.encode('utf8')
        data = utils.cleanXMLString(response.text).encode('utf8')
        return ElementTree.fromstring(data) if data.strip() else None

    def search(self, query, mediatype=None, limit=None, sectionId=None):

@@ -6,6 +6,7 @@ import logging
import os
import re
import string
import sys
import time
import unicodedata
import warnings

@@ -673,3 +674,45 @@ def openOrRead(file):
def sha1hash(guid):
    """ Return the SHA1 hash of a guid. """
    return sha1(guid.encode('utf-8')).hexdigest()


# https://stackoverflow.com/a/64570125
_illegal_XML_characters = [
    (0x00, 0x08),
    (0x0B, 0x0C),
    (0x0E, 0x1F),
    (0x7F, 0x84),
    (0x86, 0x9F),
    (0xFDD0, 0xFDDF),
    (0xFFFE, 0xFFFF),
]
if sys.maxunicode >= 0x10000:  # not narrow build
    _illegal_XML_characters.extend(
        [
            (0x1FFFE, 0x1FFFF),
            (0x2FFFE, 0x2FFFF),
            (0x3FFFE, 0x3FFFF),
            (0x4FFFE, 0x4FFFF),
            (0x5FFFE, 0x5FFFF),
            (0x6FFFE, 0x6FFFF),
            (0x7FFFE, 0x7FFFF),
            (0x8FFFE, 0x8FFFF),
            (0x9FFFE, 0x9FFFF),
            (0xAFFFE, 0xAFFFF),
            (0xBFFFE, 0xBFFFF),
            (0xCFFFE, 0xCFFFF),
            (0xDFFFE, 0xDFFFF),
            (0xEFFFE, 0xEFFFF),
            (0xFFFFE, 0xFFFFF),
            (0x10FFFE, 0x10FFFF),
        ]
    )
_illegal_XML_ranges = [
    fr'{chr(low)}-{chr(high)}'
    for (low, high) in _illegal_XML_characters
]
_illegal_XML_re = re.compile(fr'[{"".join(_illegal_XML_ranges)}]')


def cleanXMLString(s):
    return _illegal_XML_re.sub('', s)

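cleanXMLString() strips every code point that is illegal in XML 1.0 before the response text reaches ElementTree, which otherwise raises a ParseError on control characters. A quick demonstration of the regex (assuming the plexapi import path used above):

    from xml.etree import ElementTree
    from plexapi import utils

    dirty = 'Now Playing \x00\x08 on Plex'
    clean = utils.cleanXMLString(dirty)   # -> 'Now Playing  on Plex'
    ElementTree.fromstring(f'<msg title="{clean}"/>')  # parses cleanly
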
@@ -375,6 +375,7 @@ class Movie(
            studio (str): Studio that created movie (Di Bonaventura Pictures; 21 Laps Entertainment).
            tagline (str): Movie tag line (Back 2 Work; Who says men can't change?).
            theme (str): URL to theme resource (/library/metadata/<ratingkey>/theme/<themeid>).
            ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
            useOriginalTitle (int): Setting that indicates if the original title is used for the movie
                (-1 = Library default, 0 = No, 1 = Yes).
            viewOffset (int): View offset in milliseconds.

@@ -420,6 +421,7 @@ class Movie(
        self.studio = data.attrib.get('studio')
        self.tagline = data.attrib.get('tagline')
        self.theme = data.attrib.get('theme')
        self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
        self.useOriginalTitle = utils.cast(int, data.attrib.get('useOriginalTitle', '-1'))
        self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0))
        self.writers = self.findItems(data, media.Writer)

@@ -456,8 +458,8 @@ class Movie(

    def reviews(self):
        """ Returns a list of :class:`~plexapi.media.Review` objects. """
        data = self._server.query(self._details_key)
        return self.findItems(data, media.Review, rtag='Video')
        key = f'{self.key}?includeReviews=1'
        return self.fetchItems(key, cls=media.Review, rtag='Video')

    def editions(self):
        """ Returns a list of :class:`~plexapi.video.Movie` objects

@@ -543,6 +545,7 @@ class Show(
                (-1 = Account default, 0 = Manually selected, 1 = Shown with foreign audio, 2 = Always enabled).
            tagline (str): Show tag line.
            theme (str): URL to theme resource (/library/metadata/<ratingkey>/theme/<themeid>).
            ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
            useOriginalTitle (int): Setting that indicates if the original title is used for the show
                (-1 = Library default, 0 = No, 1 = Yes).
            viewedLeafCount (int): Number of items marked as played in the show view.

@@ -592,6 +595,7 @@ class Show(
        self.subtitleMode = utils.cast(int, data.attrib.get('subtitleMode', '-1'))
        self.tagline = data.attrib.get('tagline')
        self.theme = data.attrib.get('theme')
        self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
        self.useOriginalTitle = utils.cast(int, data.attrib.get('useOriginalTitle', '-1'))
        self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount'))
        self.year = utils.cast(int, data.attrib.get('year'))

@@ -614,8 +618,8 @@ class Show(
        """ Returns show's On Deck :class:`~plexapi.video.Video` object or `None`.
            If show is unwatched, return will likely be the first episode.
        """
        data = self._server.query(self._details_key)
        return next(iter(self.findItems(data, rtag='OnDeck')), None)
        key = f'{self.key}?includeOnDeck=1'
        return next(iter(self.fetchItems(key, cls=Episode, rtag='OnDeck')), None)

    def season(self, title=None, season=None):
        """ Returns the season with the specified title or number.

@@ -735,6 +739,7 @@ class Season(
            subtitleLanguage (str): Setting that indicates the preferred subtitle language.
            subtitleMode (int): Setting that indicates the auto-select subtitle mode.
                (-1 = Series default, 0 = Manually selected, 1 = Shown with foreign audio, 2 = Always enabled).
            ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
            viewedLeafCount (int): Number of items marked as played in the season view.
            year (int): Year the season was released.
    """

@@ -766,6 +771,7 @@ class Season(
        self.ratings = self.findItems(data, media.Rating)
        self.subtitleLanguage = data.attrib.get('subtitleLanguage', '')
        self.subtitleMode = utils.cast(int, data.attrib.get('subtitleMode', '-1'))
        self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
        self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount'))
        self.year = utils.cast(int, data.attrib.get('year'))

@@ -796,8 +802,8 @@ class Season(
        """ Returns season's On Deck :class:`~plexapi.video.Video` object or `None`.
            Will only return a match if the show's On Deck episode is in this season.
        """
        data = self._server.query(self._details_key)
        return next(iter(self.findItems(data, rtag='OnDeck')), None)
        key = f'{self.key}?includeOnDeck=1'
        return next(iter(self.fetchItems(key, cls=Episode, rtag='OnDeck')), None)

    def episode(self, title=None, episode=None):
        """ Returns the episode with the given title or number.

@@ -914,6 +920,7 @@ class Episode(
            skipParent (bool): True if the show's seasons are set to hidden.
            sourceURI (str): Remote server URI (server://<machineIdentifier>/com.plexapp.plugins.library)
                (remote playlist item only).
            ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
            viewOffset (int): View offset in milliseconds.
            writers (List<:class:`~plexapi.media.Writer`>): List of writers objects.
            year (int): Year the episode was released.

@@ -958,6 +965,7 @@ class Episode(
        self.roles = self.findItems(data, media.Role)
        self.skipParent = utils.cast(bool, data.attrib.get('skipParent', '0'))
        self.sourceURI = data.attrib.get('source')  # remote playlist item
        self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
        self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0))
        self.writers = self.findItems(data, media.Writer)
        self.year = utils.cast(int, data.attrib.get('year'))

@@ -120,8 +120,8 @@ class version_info(NamedTuple):
        return f"{__name__}.{type(self).__name__}({', '.join('{}={!r}'.format(*nv) for nv in zip(self._fields, self))})"


__version_info__ = version_info(3, 1, 2, "final", 1)
__version_time__ = "06 Mar 2024 07:08 UTC"
__version_info__ = version_info(3, 1, 4, "final", 1)
__version_time__ = "25 Aug 2024 14:40 UTC"
__version__ = __version_info__.__version__
__versionTime__ = __version_time__
__author__ = "Paul McGuire <ptmcg.gm+pyparsing@gmail.com>"

@@ -143,7 +143,7 @@ from .common import (
    _builtin_exprs as common_builtin_exprs,
)

# define backward compat synonyms
# Compatibility synonyms
if "pyparsing_unicode" not in globals():
    pyparsing_unicode = unicode  # type: ignore[misc]
if "pyparsing_common" not in globals():

@@ -196,7 +196,7 @@ def with_class(classname, namespace=""):
    return with_attribute(**{classattr: classname})


# pre-PEP8 compatibility symbols
# Compatibility synonyms
# fmt: off
replaceWith = replaced_by_pep8("replaceWith", replace_with)
removeQuotes = replaced_by_pep8("removeQuotes", remove_quotes)

@@ -418,20 +418,15 @@ class pyparsing_common:
    # fmt: on

    # pre-PEP8 compatibility names
    convertToInteger = convert_to_integer
    """Deprecated - use :class:`convert_to_integer`"""
    convertToFloat = convert_to_float
    """Deprecated - use :class:`convert_to_float`"""
    convertToDate = convert_to_date
    """Deprecated - use :class:`convert_to_date`"""
    convertToDatetime = convert_to_datetime
    """Deprecated - use :class:`convert_to_datetime`"""
    stripHTMLTags = strip_html_tags
    """Deprecated - use :class:`strip_html_tags`"""
    upcaseTokens = upcase_tokens
    """Deprecated - use :class:`upcase_tokens`"""
    downcaseTokens = downcase_tokens
    """Deprecated - use :class:`downcase_tokens`"""
    # fmt: off
    convertToInteger = staticmethod(replaced_by_pep8("convertToInteger", convert_to_integer))
    convertToFloat = staticmethod(replaced_by_pep8("convertToFloat", convert_to_float))
    convertToDate = staticmethod(replaced_by_pep8("convertToDate", convert_to_date))
    convertToDatetime = staticmethod(replaced_by_pep8("convertToDatetime", convert_to_datetime))
    stripHTMLTags = staticmethod(replaced_by_pep8("stripHTMLTags", strip_html_tags))
    upcaseTokens = staticmethod(replaced_by_pep8("upcaseTokens", upcase_tokens))
    downcaseTokens = staticmethod(replaced_by_pep8("downcaseTokens", downcase_tokens))
    # fmt: on


_builtin_exprs = [

File diff suppressed because it is too large.

@@ -36,6 +36,7 @@ jinja2_template_source = """\
    </head>
    <body>
    {% endif %}
    <meta charset="UTF-8"/>
    {{ body | safe }}
    {% for diagram in diagrams %}
    <div class="railroad-group">

@@ -89,7 +90,7 @@ class AnnotatedItem(railroad.Group):
    """

    def __init__(self, label: str, item):
        super().__init__(item=item, label="[{}]".format(label) if label else label)
        super().__init__(item=item, label=f"[{label}]")


class EditablePartial(Generic[T]):

@@ -145,7 +146,7 @@ def railroad_to_html(diagrams: List[NamedDiagram], embed=False, **kwargs) -> str
            continue
        io = StringIO()
        try:
            css = kwargs.get('css')
            css = kwargs.get("css")
            diagram.diagram.writeStandalone(io.write, css=css)
        except AttributeError:
            diagram.diagram.writeSvg(io.write)

@@ -425,9 +426,11 @@ def _apply_diagram_item_enhancements(fn):
        element_results_name = element.resultsName
        if element_results_name:
            # add "*" to indicate if this is a "list all results" name
            element_results_name += "" if element.modalResults else "*"
            modal_tag = "" if element.modalResults else "*"
            ret = EditablePartial.from_call(
                railroad.Group, item=ret, label=element_results_name
                railroad.Group,
                item=ret,
                label=f"{repr(element_results_name)}{modal_tag}",
            )

        return ret

@@ -534,7 +537,7 @@ def _to_diagram_element(
    # (all will have the same name, and resultsName)
    if not exprs:
        return None
    if len(set((e.name, e.resultsName) for e in exprs)) == 1:
    if len(set((e.name, e.resultsName) for e in exprs)) == 1 and len(exprs) > 2:
        ret = EditablePartial.from_call(
            railroad.OneOrMore, item="", repeat=str(len(exprs))
        )

@@ -563,7 +566,7 @@ def _to_diagram_element(
        if show_groups:
            ret = EditablePartial.from_call(AnnotatedItem, label="", item="")
        else:
            ret = EditablePartial.from_call(railroad.Group, label="", item="")
        ret = EditablePartial.from_call(railroad.Sequence, items=[])
    elif isinstance(element, pyparsing.TokenConverter):
        label = type(element).__name__.lower()
        if label == "tokenconverter":

@@ -573,8 +576,36 @@ def _to_diagram_element(
    elif isinstance(element, pyparsing.Opt):
        ret = EditablePartial.from_call(railroad.Optional, item="")
    elif isinstance(element, pyparsing.OneOrMore):
        ret = EditablePartial.from_call(railroad.OneOrMore, item="")
        if element.not_ender is not None:
            args = [
                parent,
                lookup,
                vertical,
                index,
                name_hint,
                show_results_names,
                show_groups,
            ]
            return _to_diagram_element(
                (~element.not_ender.expr + element.expr)[1, ...].set_name(element.name),
                *args,
            )
        ret = EditablePartial.from_call(railroad.OneOrMore, item=None)
    elif isinstance(element, pyparsing.ZeroOrMore):
        if element.not_ender is not None:
            args = [
                parent,
                lookup,
                vertical,
                index,
                name_hint,
                show_results_names,
                show_groups,
            ]
            return _to_diagram_element(
                (~element.not_ender.expr + element.expr)[...].set_name(element.name),
                *args,
            )
        ret = EditablePartial.from_call(railroad.ZeroOrMore, item="")
    elif isinstance(element, pyparsing.Group):
        ret = EditablePartial.from_call(

@@ -85,7 +85,7 @@ class ParseBaseException(Exception):
        ret = []
        if isinstance(exc, ParseBaseException):
            ret.append(exc.line)
            ret.append(" " * (exc.column - 1) + "^")
            ret.append(f"{' ' * (exc.column - 1)}^")
        ret.append(f"{type(exc).__name__}: {exc}")

        if depth <= 0:

@@ -245,6 +245,7 @@ class ParseBaseException(Exception):
        """
        return self.explain_exception(self, depth)

    # Compatibility synonyms
    # fmt: off
    markInputline = replaced_by_pep8("markInputline", mark_input_line)
    # fmt: on

@@ -782,9 +782,12 @@ def infix_notation(

    # if lpar and rpar are not suppressed, wrap in group
    if not (isinstance(lpar, Suppress) and isinstance(rpar, Suppress)):
        lastExpr = base_expr | Group(lpar + ret + rpar)
        lastExpr = base_expr | Group(lpar + ret + rpar).set_name(
            f"nested_{base_expr.name}"
        )
    else:
        lastExpr = base_expr | (lpar + ret + rpar)
        lastExpr = base_expr | (lpar + ret + rpar).set_name(f"nested_{base_expr.name}")
    root_expr = lastExpr

    arity: int
    rightLeftAssoc: opAssoc

@@ -855,6 +858,7 @@ def infix_notation(
        thisExpr <<= (matchExpr | lastExpr).setName(term_name)
        lastExpr = thisExpr
    ret <<= lastExpr
    root_expr.set_name("base_expr")
    return ret


@@ -1049,7 +1053,7 @@ def delimited_list(
    )


# pre-PEP8 compatible names
# Compatibility synonyms
# fmt: off
opAssoc = OpAssoc
anyOpenTag = any_open_tag

@@ -4,12 +4,14 @@ from collections.abc import (
    Mapping,
    MutableSequence,
    Iterator,
    Sequence,
    Container,
    Iterable,
)
import pprint
from typing import Tuple, Any, Dict, Set, List

from .util import replaced_by_pep8


str_type: Tuple[type, ...] = (str, bytes)
_generator_type = type((_ for _ in ()))

@@ -573,20 +575,20 @@ class ParseResults:
        # replace values with copies if they are of known mutable types
        for i, obj in enumerate(self._toklist):
            if isinstance(obj, ParseResults):
                self._toklist[i] = obj.deepcopy()
                ret._toklist[i] = obj.deepcopy()
            elif isinstance(obj, (str, bytes)):
                pass
            elif isinstance(obj, MutableMapping):
                self._toklist[i] = dest = type(obj)()
                ret._toklist[i] = dest = type(obj)()
                for k, v in obj.items():
                    dest[k] = v.deepcopy() if isinstance(v, ParseResults) else v
            elif isinstance(obj, Container):
                self._toklist[i] = type(obj)(
            elif isinstance(obj, Iterable):
                ret._toklist[i] = type(obj)(
                    v.deepcopy() if isinstance(v, ParseResults) else v for v in obj
                )
        return ret

    def get_name(self):
    def get_name(self) -> str:
        r"""
        Returns the results name for this token expression. Useful when several
        different expressions might match at a particular location.

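The deepcopy() fix above writes the copied values into ret._toklist instead of mutating self._toklist, so copying no longer rewrites the source results in place; matching on Iterable rather than Container also lets tuples and lists of nested results be copied uniformly. A small check, assuming pyparsing 3.1.4:

    import pyparsing as pp

    result = pp.Group(pp.Word(pp.alphas))('g').parse_string('hello')
    copied = result.deepcopy()
    assert copied.as_list() == result.as_list()  # equal content...
    assert copied[0] is not result[0]            # ...but independent nested objects
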
@@ -53,51 +53,51 @@ class unicode_set:
    _ranges: UnicodeRangeList = []

    @_lazyclassproperty
    def _chars_for_ranges(cls):
        ret = []
    def _chars_for_ranges(cls) -> List[str]:
        ret: List[int] = []
        for cc in cls.__mro__:
            if cc is unicode_set:
                break
            for rr in getattr(cc, "_ranges", ()):
                ret.extend(range(rr[0], rr[-1] + 1))
        return [chr(c) for c in sorted(set(ret))]
        return sorted(chr(c) for c in set(ret))

    @_lazyclassproperty
    def printables(cls):
    def printables(cls) -> str:
        """all non-whitespace characters in this range"""
        return "".join(filterfalse(str.isspace, cls._chars_for_ranges))

    @_lazyclassproperty
    def alphas(cls):
    def alphas(cls) -> str:
        """all alphabetic characters in this range"""
        return "".join(filter(str.isalpha, cls._chars_for_ranges))

    @_lazyclassproperty
    def nums(cls):
    def nums(cls) -> str:
        """all numeric digit characters in this range"""
        return "".join(filter(str.isdigit, cls._chars_for_ranges))

    @_lazyclassproperty
    def alphanums(cls):
    def alphanums(cls) -> str:
        """all alphanumeric characters in this range"""
        return cls.alphas + cls.nums

    @_lazyclassproperty
    def identchars(cls):
    def identchars(cls) -> str:
        """all characters in this range that are valid identifier characters, plus underscore '_'"""
        return "".join(
            sorted(
                set(
                    "".join(filter(str.isidentifier, cls._chars_for_ranges))
                    + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµº"
                    + "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ"
                    + "_"
                set(filter(str.isidentifier, cls._chars_for_ranges))
                | set(
                    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµº"
                    "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ"
                    "_"
                )
            )
        )

    @_lazyclassproperty
    def identbodychars(cls):
    def identbodychars(cls) -> str:
        """
        all characters in this range that are valid identifier body characters,
        plus the digits 0-9, and · (Unicode MIDDLE DOT)

@@ -105,7 +105,9 @@ class unicode_set:
        identifier_chars = set(
            c for c in cls._chars_for_ranges if ("_" + c).isidentifier()
        )
        return "".join(sorted(identifier_chars | set(cls.identchars + "0123456789·")))
        return "".join(
            sorted(identifier_chars | set(cls.identchars) | set("0123456789·"))
        )

    @_lazyclassproperty
    def identifier(cls):

@@ -246,7 +246,7 @@ def replaced_by_pep8(compat_name: str, fn: C) -> C:

    # (Presence of 'self' arg in signature is used by explain_exception() methods, so we take
    # some extra steps to add it if present in decorated function.)
    if "self" == list(inspect.signature(fn).parameters)[0]:
    if ["self"] == list(inspect.signature(fn).parameters)[:1]:

        @wraps(fn)
        def _inner(self, *args, **kwargs):

@@ -118,7 +118,7 @@ Serializing multiple objects to JSON lines (newline-delimited JSON)::

"""
from __future__ import absolute_import
__version__ = '3.19.2'
__version__ = '3.19.3'
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',

@@ -1,7 +1,7 @@
apscheduler==3.10.1
cryptography==43.0.0
importlib-metadata==8.5.0
importlib-resources==6.4.0
importlib-resources==6.4.5
pyinstaller==6.8.0
pyopenssl==24.2.1

@ -27,11 +27,10 @@ from plexpy import users
|
|||
|
||||
class ActivityProcessor(object):
|
||||
|
||||
def __init__(self):
|
||||
self.db = database.MonitorDatabase()
|
||||
|
||||
def write_session(self, session=None, notify=True):
|
||||
if session:
|
||||
db = database.MonitorDatabase()
|
||||
|
||||
values = {'session_key': session.get('session_key', ''),
|
||||
'session_id': session.get('session_id', ''),
|
||||
'transcode_key': session.get('transcode_key', ''),
|
||||
|
@ -149,7 +148,7 @@ class ActivityProcessor(object):
|
|||
keys = {'session_key': session.get('session_key', ''),
|
||||
'rating_key': session.get('rating_key', '')}
|
||||
|
||||
result = self.db.upsert('sessions', values, keys)
|
||||
result = db.upsert('sessions', values, keys)
|
||||
|
||||
if result == 'insert':
|
||||
# If it's our first write then time stamp it.
|
||||
|
@ -159,7 +158,7 @@ class ActivityProcessor(object):
|
|||
media_type=values['media_type'],
|
||||
started=started)
|
||||
timestamp = {'started': started, 'initial_stream': initial_stream}
|
||||
self.db.upsert('sessions', timestamp, keys)
|
||||
db.upsert('sessions', timestamp, keys)
|
||||
|
||||
# Check if any notification agents have notifications enabled
|
||||
if notify:
|
||||
|
@ -260,6 +259,8 @@ class ActivityProcessor(object):
|
|||
logger.debug("Tautulli ActivityProcessor :: History logging for library '%s' is disabled." % library_details['section_name'])
|
||||
|
||||
if logging_enabled:
|
||||
db = database.MonitorDatabase()
|
||||
|
||||
media_info = {}
|
||||
|
||||
# Fetch metadata first so we can return false if it fails
|
||||
|
@ -316,10 +317,10 @@ class ActivityProcessor(object):
|
|||
|
||||
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history transaction..."
|
||||
# % session['session_key'])
|
||||
self.db.upsert(table_name='session_history', key_dict=keys, value_dict=values)
|
||||
db.upsert(table_name='session_history', key_dict=keys, value_dict=values)
|
||||
|
||||
# Get the last insert row id
|
||||
last_id = self.db.last_insert_id()
|
||||
last_id = db.last_insert_id()
|
||||
self.group_history(last_id, session, metadata)
|
||||
|
||||
# logger.debug("Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s"
|
||||
|
@ -410,7 +411,7 @@ class ActivityProcessor(object):
|
|||
|
||||
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_media_info transaction..."
|
||||
# % session['session_key'])
|
||||
self.db.upsert(table_name='session_history_media_info', key_dict=keys, value_dict=values)
|
||||
db.upsert(table_name='session_history_media_info', key_dict=keys, value_dict=values)
|
||||
|
||||
# Write the session_history_metadata table
|
||||
directors = ";".join(metadata['directors'])
|
||||
|
@ -475,7 +476,7 @@ class ActivityProcessor(object):
|
|||
|
||||
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
|
||||
# % session['session_key'])
|
||||
self.db.upsert(table_name='session_history_metadata', key_dict=keys, value_dict=values)
|
||||
db.upsert(table_name='session_history_metadata', key_dict=keys, value_dict=values)
|
||||
|
||||
# Return the session row id when the session is successfully written to the database
|
||||
return session['id']
|
||||
|
@ -484,6 +485,8 @@ class ActivityProcessor(object):
|
|||
new_session = prev_session = None
|
||||
prev_watched = None
|
||||
|
||||
db = database.MonitorDatabase()
|
||||
|
||||
if session['live']:
|
||||
# Check if we should group the session, select the last guid from the user within the last day
|
||||
query = "SELECT session_history.id, session_history_metadata.guid, session_history.reference_id " \
|
||||
|
@ -495,7 +498,7 @@ class ActivityProcessor(object):
|
|||
|
||||
args = [last_id, session['user_id']]
|
||||
|
||||
result = self.db.select(query=query, args=args)
|
||||
result = db.select(query=query, args=args)
|
||||
|
||||
if len(result) > 0:
|
||||
new_session = {'id': last_id,
|
||||
|
@ -515,7 +518,7 @@ class ActivityProcessor(object):
|
|||
|
||||
args = [last_id, session['user_id'], session['rating_key']]
|
||||
|
||||
result = self.db.select(query=query, args=args)
|
||||
result = db.select(query=query, args=args)
|
||||
|
||||
if len(result) > 1:
|
||||
new_session = {'id': result[0]['id'],
|
||||
|
@ -558,9 +561,10 @@ class ActivityProcessor(object):
|
|||
logger.debug("Tautulli ActivityProcessor :: Not grouping history for sessionKey %s", session['session_key'])
|
||||
args = [last_id, last_id]
|
||||
|
||||
self.db.action(query=query, args=args)
|
||||
db.action(query=query, args=args)
|
||||
|
||||
def get_sessions(self, user_id=None, ip_address=None):
|
||||
db = database.MonitorDatabase()
|
||||
query = "SELECT * FROM sessions"
|
||||
args = []
|
||||
|
||||
|
@ -569,12 +573,13 @@ class ActivityProcessor(object):
|
|||
query += " WHERE user_id = ?" + ip
|
||||
args.append(user_id)
|
||||
|
||||
sessions = self.db.select(query, args)
|
||||
sessions = db.select(query, args)
|
||||
return sessions
|
||||
|
||||
def get_session_by_key(self, session_key=None):
|
||||
db = database.MonitorDatabase()
|
||||
if str(session_key).isdigit():
|
||||
session = self.db.select_single("SELECT * FROM sessions "
|
||||
session = db.select_single("SELECT * FROM sessions "
|
||||
"WHERE session_key = ? ",
|
||||
args=[session_key])
|
||||
if session:
|
||||
|
@ -583,8 +588,9 @@ class ActivityProcessor(object):
|
|||
return None
|
||||
|
||||
def get_session_by_id(self, session_id=None):
|
||||
db = database.MonitorDatabase()
|
||||
if session_id:
|
||||
session = self.db.select_single("SELECT * FROM sessions "
|
||||
session = db.select_single("SELECT * FROM sessions "
|
||||
"WHERE session_id = ? ",
|
||||
args=[session_id])
|
||||
if session:
|
||||
|
@ -593,6 +599,7 @@ class ActivityProcessor(object):
|
|||
return None
|
||||
|
||||
def set_session_state(self, session_key=None, state=None, **kwargs):
|
||||
db = database.MonitorDatabase()
|
||||
if str(session_key).isdigit():
|
||||
values = {}
|
||||
|
||||
|
@ -603,21 +610,23 @@ class ActivityProcessor(object):
|
|||
values[k] = v
|
||||
|
||||
keys = {'session_key': session_key}
|
||||
result = self.db.upsert('sessions', values, keys)
|
||||
result = db.upsert('sessions', values, keys)
|
||||
|
||||
return result
|
||||
|
||||
return None
|
||||
|
||||
def delete_session(self, session_key=None, row_id=None):
|
||||
db = database.MonitorDatabase()
|
||||
if str(session_key).isdigit():
|
||||
self.db.action("DELETE FROM sessions WHERE session_key = ?", [session_key])
|
||||
db.action("DELETE FROM sessions WHERE session_key = ?", [session_key])
|
||||
elif str(row_id).isdigit():
|
||||
self.db.action("DELETE FROM sessions WHERE id = ?", [row_id])
|
||||
db.action("DELETE FROM sessions WHERE id = ?", [row_id])
|
||||
|
||||
def set_session_last_paused(self, session_key=None, timestamp=None):
|
||||
db = database.MonitorDatabase()
|
||||
if str(session_key).isdigit():
|
||||
result = self.db.select("SELECT last_paused, paused_counter "
|
||||
result = db.select("SELECT last_paused, paused_counter "
|
||||
"FROM sessions "
|
||||
"WHERE session_key = ?", args=[session_key])
|
||||
|
||||
|
@ -636,17 +645,19 @@ class ActivityProcessor(object):
|
|||
values['paused_counter'] = paused_counter
|
||||
|
||||
keys = {'session_key': session_key}
|
||||
self.db.upsert('sessions', values, keys)
|
||||
db.upsert('sessions', values, keys)
|
||||
|
||||
def increment_session_buffer_count(self, session_key=None):
|
||||
db = database.MonitorDatabase()
|
||||
if str(session_key).isdigit():
|
||||
self.db.action("UPDATE sessions SET buffer_count = buffer_count + 1 "
|
||||
db.action("UPDATE sessions SET buffer_count = buffer_count + 1 "
|
||||
"WHERE session_key = ?",
|
||||
[session_key])
|
||||
|
||||
def get_session_buffer_count(self, session_key=None):
|
||||
db = database.MonitorDatabase()
|
||||
if str(session_key).isdigit():
|
||||
buffer_count = self.db.select_single("SELECT buffer_count "
|
||||
buffer_count = db.select_single("SELECT buffer_count "
|
||||
"FROM sessions "
|
||||
"WHERE session_key = ?",
|
||||
[session_key])
|
||||
|
@ -656,14 +667,16 @@ class ActivityProcessor(object):
|
|||
return 0
|
||||
|
||||
def set_session_buffer_trigger_time(self, session_key=None):
|
||||
db = database.MonitorDatabase()
|
||||
if str(session_key).isdigit():
|
||||
self.db.action("UPDATE sessions SET buffer_last_triggered = strftime('%s', 'now') "
|
||||
db.action("UPDATE sessions SET buffer_last_triggered = strftime('%s', 'now') "
|
||||
"WHERE session_key = ?",
|
||||
[session_key])
|
||||
|
||||
def get_session_buffer_trigger_time(self, session_key=None):
|
||||
db = database.MonitorDatabase()
|
||||
if str(session_key).isdigit():
|
||||
last_time = self.db.select_single("SELECT buffer_last_triggered "
|
||||
last_time = db.select_single("SELECT buffer_last_triggered "
|
||||
"FROM sessions "
|
||||
"WHERE session_key = ?",
|
||||
[session_key])
|
||||
|
@ -673,37 +686,43 @@ class ActivityProcessor(object):
|
|||
return None
|
||||
|
||||
def set_temp_stopped(self):
|
||||
db = database.MonitorDatabase()
|
||||
stopped_time = helpers.timestamp()
|
||||
self.db.action("UPDATE sessions SET stopped = ?", [stopped_time])
|
||||
db.action("UPDATE sessions SET stopped = ?", [stopped_time])
|
||||
|
||||
def increment_write_attempts(self, session_key=None):
|
||||
db = database.MonitorDatabase()
|
||||
if str(session_key).isdigit():
|
||||
session = self.get_session_by_key(session_key=session_key)
|
||||
self.db.action("UPDATE sessions SET write_attempts = ? WHERE session_key = ?",
|
||||
db.action("UPDATE sessions SET write_attempts = ? WHERE session_key = ?",
|
||||
[session['write_attempts'] + 1, session_key])
|
||||
|
||||
def set_marker(self, session_key=None, marker_idx=None, marker_type=None):
|
||||
db = database.MonitorDatabase()
|
||||
marker_args = [
|
||||
int(marker_type == 'intro'),
|
||||
int(marker_type == 'commercial'),
|
||||
int(marker_type == 'credits')
|
||||
]
|
||||
self.db.action("UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? "
|
||||
db.action("UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? "
|
||||
"WHERE session_key = ?",
|
||||
marker_args + [marker_idx, session_key])
|
||||
|
||||
def set_watched(self, session_key=None):
|
||||
self.db.action("UPDATE sessions SET watched = ? "
|
||||
db = database.MonitorDatabase()
|
||||
db.action("UPDATE sessions SET watched = ? "
|
||||
"WHERE session_key = ?",
|
||||
[1, session_key])
|
||||
|
||||
def write_continued_session(self, user_id=None, machine_id=None, media_type=None, stopped=None):
|
||||
db = database.MonitorDatabase()
|
||||
keys = {'user_id': user_id, 'machine_id': machine_id, 'media_type': media_type}
|
||||
values = {'stopped': stopped}
|
||||
self.db.upsert(table_name='sessions_continued', key_dict=keys, value_dict=values)
|
||||
db.upsert(table_name='sessions_continued', key_dict=keys, value_dict=values)
|
||||
|
||||
def is_initial_stream(self, user_id=None, machine_id=None, media_type=None, started=None):
|
||||
last_session = self.db.select_single("SELECT stopped "
|
||||
db = database.MonitorDatabase()
|
||||
last_session = db.select_single("SELECT stopped "
|
||||
"FROM sessions_continued "
|
||||
"WHERE user_id = ? AND machine_id = ? AND media_type = ? "
|
||||
"ORDER BY stopped DESC",
|
||||
|
@@ -717,11 +736,12 @@ class ActivityProcessor(object):

         logger.info("Tautulli ActivityProcessor :: Regrouping session history...")

+        db = database.MonitorDatabase()
         query = (
             "SELECT * FROM session_history "
             "JOIN session_history_metadata ON session_history.id = session_history_metadata.id"
         )
-        results = self.db.select(query)
+        results = db.select(query)
         count = len(results)
         progress = 0
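These hunks replace the long-lived `self.db` handle on `ActivityProcessor` with a `database.MonitorDatabase()` created inside each method, so the SQLite connection is short-lived rather than pinned for the processor's lifetime (the memory-leak fix noted in the changelog, #2404). A minimal sketch of the pattern, using a simplified stand-in for `MonitorDatabase` rather than Tautulli's actual implementation:

```python
import sqlite3

class MonitorDatabase:
    """Simplified stand-in for plexpy's MonitorDatabase (illustration only)."""
    def __init__(self, filename='tautulli-example.db'):
        # A fresh, short-lived connection per instance instead of one
        # handle held on the ActivityProcessor for its whole lifetime.
        self.connection = sqlite3.connect(filename, timeout=20)

    def action(self, query, args=None):
        with self.connection:  # commits on success, rolls back on error
            return self.connection.execute(query, args or [])

class ActivityProcessor:
    def set_temp_stopped(self):
        db = MonitorDatabase()  # per-call handle, as in the hunks above
        db.action("UPDATE sessions SET stopped = strftime('%s', 'now')")

if __name__ == '__main__':
    MonitorDatabase().action("CREATE TABLE IF NOT EXISTS sessions (stopped INTEGER)")
    ActivityProcessor().set_temp_stopped()
```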
@@ -170,6 +170,7 @@ AUDIO_CODEC_OVERRIDES = {

 VIDEO_RESOLUTION_OVERRIDES = {
     'sd': 'SD',
+    '2k': '2k',
     '4k': '4k'
 }
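The new `'2k'` entry pins the display value for 2k streams, matching the changelog's "Display of 2k resolution on activity card" fix. A hypothetical call site; the fallback that appends "p" to plain pixel heights is an assumption, since the lookup itself is not shown in this diff:

```python
VIDEO_RESOLUTION_OVERRIDES = {
    'sd': 'SD',
    '2k': '2k',
    '4k': '4k'
}

# format_video_resolution is a hypothetical helper for illustration.
def format_video_resolution(resolution):
    return VIDEO_RESOLUTION_OVERRIDES.get(resolution, resolution + 'p')

assert format_video_resolution('1080') == '1080p'
assert format_video_resolution('2k') == '2k'  # would otherwise render oddly
```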
@@ -24,8 +24,6 @@ from cloudinary.utils import cloudinary_url
 from collections import OrderedDict
 from datetime import date, datetime, timezone
 from functools import reduce, wraps
-import hashlib
-import imghdr
 from itertools import groupby
 from future.moves.itertools import islice, zip_longest
 from ipaddress import ip_address, ip_network, IPv4Address
@@ -272,7 +270,8 @@ def human_duration(ms, sig='dhm', units='ms', return_seconds=300000):
     if return_seconds and ms < return_seconds:
         sig = 'dhms'

-    ms = ms * factors[units]
+    r = factors[sig[-1]]
+    ms = round(ms * factors[units] / r) * r

     d, h = divmod(ms, factors['d'])
     h, m = divmod(h, factors['h'])
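The rewritten lines round the millisecond total to the smallest displayed unit (`sig[-1]`) before the divmod cascade, so a duration just under a unit boundary rounds up instead of truncating (the changelog's "Round runtime before converting to human duration"). A worked example, assuming a `factors` map of unit letters to milliseconds as the rest of the function implies:

```python
# Unit factors in milliseconds (assumed shape of the function's map).
factors = {'ms': 1, 's': 1000, 'm': 60 * 1000,
           'h': 60 * 60 * 1000, 'd': 24 * 60 * 60 * 1000}

ms, sig = 3599000, 'dhm'        # 59 min 59 s, shown down to minutes
r = factors[sig[-1]]            # smallest displayed unit: 'm'
rounded = round(ms / r) * r     # 3600000: exactly 60 minutes
# The old code fed the raw 3599000 ms into the cascade, truncating to
# "59 mins"; rounding first lets the value carry up to "1 hr".
print(rounded // factors['h'])  # 1
```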
@@ -942,39 +941,6 @@ def cloudinary_transform(rating_key=None, width=1000, height=1500, opacity=100,
     return url


-def cache_image(url, image=None):
-    """
-    Saves an image to the cache directory.
-    If no image is provided, tries to return the image from the cache directory.
-    """
-    # Create image directory if it doesn't exist
-    imgdir = os.path.join(plexpy.CONFIG.CACHE_DIR, 'images/')
-    if not os.path.exists(imgdir):
-        logger.debug("Tautulli Helpers :: Creating image cache directory at %s" % imgdir)
-        os.makedirs(imgdir)
-
-    # Create a hash of the url to use as the filename
-    imghash = hashlib.md5(url).hexdigest()
-    imagefile = os.path.join(imgdir, imghash)
-
-    # If an image is provided, save it to the cache directory
-    if image:
-        try:
-            with open(imagefile, 'wb') as cache_file:
-                cache_file.write(image)
-        except IOError as e:
-            logger.error("Tautulli Helpers :: Failed to cache image %s: %s" % (imagefile, e))
-
-    # Try to return the image from the cache directory
-    if os.path.isfile(imagefile):
-        imagetype = 'image/' + imghdr.what(os.path.abspath(imagefile))
-    else:
-        imagefile = None
-        imagetype = 'image/jpeg'
-
-    return imagefile, imagetype
-
-
 def build_datatables_json(kwargs, dt_columns, default_sort_col=None):
     """ Builds datatables json data
@@ -17,6 +17,7 @@
 from io import open
 import os
+import shlex

 from apscheduler.triggers.cron import CronTrigger
 import email.utils
@@ -58,25 +59,36 @@ def schedule_newsletters(newsletter_id=None):


 def schedule_newsletter_job(newsletter_job_id, name='', func=None, remove_job=False, args=None, cron=None):
-    # apscheduler day_of_week uses 0-6 = mon-sun
     if cron:
-        cron = cron.split(' ')
-        cron[4] = str((int(cron[4]) - 1) % 7) if cron[4].isdigit() else cron[4]
-        cron = ' '.join(cron)
+        values = shlex.split(cron)
+        # apscheduler day_of_week uses 0-6 = mon-sun
+        values[4] = str((int(values[4]) - 1) % 7) if values[4].isdigit() else values[4]

     if NEWSLETTER_SCHED.get_job(newsletter_job_id):
         if remove_job:
             NEWSLETTER_SCHED.remove_job(newsletter_job_id)
             logger.info("Tautulli NewsletterHandler :: Removed scheduled newsletter: %s" % name)
         else:
             try:
                 NEWSLETTER_SCHED.reschedule_job(
-                    newsletter_job_id, args=args, trigger=CronTrigger.from_crontab(cron))
+                    newsletter_job_id, args=args, trigger=CronTrigger(
+                        minute=values[0], hour=values[1], day=values[2], month=values[3], day_of_week=values[4]
+                    )
+                )
                 logger.info("Tautulli NewsletterHandler :: Re-scheduled newsletter: %s" % name)
             except ValueError as e:
                 logger.error("Tautulli NewsletterHandler :: Failed to re-schedule newsletter: %s" % e)
     elif not remove_job:
         try:
             NEWSLETTER_SCHED.add_job(
-                func, args=args, id=newsletter_job_id, trigger=CronTrigger.from_crontab(cron),
-                misfire_grace_time=None)
+                func, args=args, id=newsletter_job_id, trigger=CronTrigger(
+                    minute=values[0], hour=values[1], day=values[2], month=values[3], day_of_week=values[4]
+                ),
+                misfire_grace_time=None
+            )
             logger.info("Tautulli NewsletterHandler :: Scheduled newsletter: %s" % name)
         except ValueError as e:
             logger.error("Tautulli NewsletterHandler :: Failed to schedule newsletter: %s" % e)


 def notify(newsletter_id=None, notify_action=None, **kwargs):
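Parsing with `shlex.split` and building the `CronTrigger` field-by-field (instead of `CronTrigger.from_crontab`) lets quoted, apscheduler-specific field expressions through, while the manual shift still maps crontab's 0=Sunday numbering onto apscheduler's 0=Monday. A standalone sketch of the same conversion:

```python
import shlex
from apscheduler.triggers.cron import CronTrigger

def cron_to_trigger(cron):
    values = shlex.split(cron)
    # apscheduler day_of_week uses 0-6 = mon-sun; crontab uses 0-6 = sun-sat.
    values[4] = str((int(values[4]) - 1) % 7) if values[4].isdigit() else values[4]
    return CronTrigger(minute=values[0], hour=values[1], day=values[2],
                       month=values[3], day_of_week=values[4])

# Crontab-style "every Sunday at 06:30": day_of_week 0 is shifted to 6.
print(cron_to_trigger('30 6 * * 0'))
# Quoted apscheduler-only day expression, now possible via shlex.split:
print(cron_to_trigger('0 8 "last fri" * *'))  # last Friday of each month
```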
@@ -586,6 +586,8 @@ class Newsletter(object):
         return parameters

     def _build_params(self):
+        from plexpy.notification_handler import CustomArrow
+
         date_format = helpers.momentjs_to_arrow(plexpy.CONFIG.DATE_FORMAT)

         if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
@@ -595,8 +597,8 @@ class Newsletter(object):

         parameters = {
             'server_name': helpers.pms_name(),
-            'start_date': self.start_date.format(date_format),
-            'end_date': self.end_date.format(date_format),
+            'start_date': CustomArrow(self.start_date, date_format),
+            'end_date': CustomArrow(self.end_date, date_format),
             'current_year': self.start_date.year,
             'current_month': self.start_date.month,
             'current_day': self.start_date.day,
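Wrapping the dates instead of pre-formatting them lets newsletter templates apply their own format specifiers, per the changelog's "Allow formatting newsletter date parameters". A minimal sketch of how such a wrapper can behave; the real `CustomArrow` lives in `plexpy.notification_handler` and may differ in detail:

```python
import arrow

class CustomArrow:
    """Sketch: a date parameter with a default format that also accepts
    per-template format specs like {start_date:MMMM DD, YYYY}."""
    def __init__(self, arrow_value, default_format=''):
        self.arrow = arrow_value
        self.default_format = default_format

    def __str__(self):
        return self.arrow.format(self.default_format)

    def __format__(self, format_spec):
        # An explicit spec in the template wins over the default.
        return self.arrow.format(format_spec or self.default_format)

start_date = CustomArrow(arrow.get('2024-10-12'), 'YYYY-MM-DD')
print('{start_date}'.format(start_date=start_date))                # 2024-10-12
print('{start_date:MMMM DD, YYYY}'.format(start_date=start_date))  # October 12, 2024
```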
@@ -16,4 +16,4 @@
 # along with Tautulli. If not, see <http://www.gnu.org/licenses/>.

 PLEXPY_BRANCH = "master"
-PLEXPY_RELEASE_VERSION = "v2.14.4"
+PLEXPY_RELEASE_VERSION = "v2.14.6"
@@ -281,7 +281,7 @@ def check_github(scheduler=False, notify=False, use_cache=False):
                           'plexpy_update_commit': plexpy.LATEST_VERSION,
                           'plexpy_update_behind': plexpy.COMMITS_BEHIND})

-    elif scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and \
+    if scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and \
             not plexpy.DOCKER and not plexpy.SNAP and not plexpy.FROZEN:
         logger.info('Running automatic update.')
         plexpy.shutdown(restart=True, update=True)
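With `elif`, the auto-update branch was skipped whenever the preceding update-notification branch had already matched, which is the likely cause of the changelog's "Auto-updater not running" fix; `if` evaluates the condition independently. A schematic sketch, with placeholder stubs for the parts of `check_github` not shown in this hunk:

```python
# Placeholder stubs; the real bodies are elided in the diff above.
AUTO_UPDATE = True

def send_update_notification():
    print('update available')

def restart_and_update():
    print('restarting to update')

def check_github(scheduler=False):
    update_available = True          # assume the check found new commits
    if update_available:
        send_update_notification()   # this branch matches, so an
                                     # `elif` below would never run
    if scheduler and AUTO_UPDATE:    # now evaluated independently
        restart_and_update()

check_github(scheduler=True)         # notifies AND updates
```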
@@ -3,7 +3,7 @@ arrow==1.3.0
 backports.zoneinfo==0.2.1;python_version<"3.9"
 beautifulsoup4==4.12.3
 bleach==6.1.0
-certifi==2024.7.4
+certifi==2024.8.30
 cheroot==10.0.1
 cherrypy==18.10.0
 cloudinary==1.41.0
@@ -17,7 +17,7 @@ html5lib==1.1
 httpagentparser==1.9.5
 idna==3.7
 importlib-metadata==8.5.0
-importlib-resources==6.4.0
+importlib-resources==6.4.5
 git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois
 IPy==1.01
 Mako==1.3.5
@@ -26,18 +26,18 @@ musicbrainzngs==0.7.1
 packaging==24.1
 paho-mqtt==2.1.0
 platformdirs==4.2.2
-plexapi==4.15.15
+plexapi==4.15.16
 portend==3.2.0
 profilehooks==1.12.0
 PyJWT==2.9.0
-pyparsing==3.1.2
+pyparsing==3.1.4
 python-dateutil==2.9.0.post0
 python-twitter==3.5
 pytz==2024.1
 requests==2.32.3
 requests-oauthlib==2.0.0
 rumps==0.4.0; platform_system == "Darwin"
-simplejson==3.19.2
+simplejson==3.19.3
 six==1.16.0
 tempora==5.7.0
 tokenize-rt==6.0.0
@@ -26,14 +26,14 @@ parts:
     stage-packages:
       - python3
       - python3-openssl
-      - python3-pycryptodome
+      - python3-cryptography
       - python3-setuptools
       - python3-pkg-resources
     build-packages:
      - git
       - python3
       - python3-openssl
-      - python3-pycryptodome
+      - python3-cryptography
       - python3-setuptools
       - python3-pkg-resources
     override-pull: |