Merge branch 'nightly' into dependabot/pip/nightly/importlib-metadata-8.5.0

JonnyWong16 authored 2024-11-16 14:48:47 -08:00; committed by GitHub
commit cefe500217
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
79 changed files with 1174 additions and 766 deletions


@ -1,5 +1,27 @@
# Changelog # Changelog
## v2.14.6 (2024-10-12)
* Newsletters:
* Fix: Allow formatting newsletter date parameters.
* Change: Support apscheduler compatible cron expressions.
* UI:
* Fix: Round runtime before converting to human duration.
* Fix: Make recently added/watched rows touch scrollable.
* Other:
* Fix: Auto-updater not running.
## v2.14.5 (2024-09-20)
* Activity:
* Fix: Display of 2k resolution on activity card.
* Notifications:
* Fix: ntfy notifications with special characters failing to send.
* Other:
* Fix: Memory leak with database closing. (#2404)
## v2.14.4 (2024-08-10) ## v2.14.4 (2024-08-10)
* Notifications: * Notifications:
@ -8,7 +30,7 @@
* UI: * UI:
* Fix: macOS platform capitalization. * Fix: macOS platform capitalization.
* Other: * Other:
* Fix: Remove deprecated getdefaultlocale (Thanks @teodorstelian) (#2364, #2345) * Fix: Remove deprecated getdefaultlocale. (Thanks @teodorstelian) (#2364, #2345)
## v2.14.3 (2024-06-19) ## v2.14.3 (2024-06-19)


@ -1478,7 +1478,8 @@ a:hover .dashboard-stats-square {
text-align: center; text-align: center;
position: relative; position: relative;
z-index: 0; z-index: 0;
overflow: hidden; overflow: auto;
scrollbar-width: none;
} }
.dashboard-recent-media { .dashboard-recent-media {
width: 100%; width: 100%;


@ -92,10 +92,10 @@
<h3 class="pull-left"><span id="recently-added-xml">Recently Added</span></h3> <h3 class="pull-left"><span id="recently-added-xml">Recently Added</span></h3>
<ul class="nav nav-header nav-dashboard pull-right" style="margin-top: -3px;"> <ul class="nav nav-header nav-dashboard pull-right" style="margin-top: -3px;">
<li> <li>
<a href="#" id="recently-added-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a> <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
</li> </li>
<li> <li>
<a href="#" id="recently-added-page-right" class="paginate btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a> <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
</li> </li>
</ul> </ul>
<div class="button-bar"> <div class="button-bar">
@ -936,10 +936,14 @@
count: recently_added_count, count: recently_added_count,
media_type: recently_added_type media_type: recently_added_type
}, },
beforeSend: function () {
$(".dashboard-recent-media-row").animate({ scrollLeft: 0 }, 1000);
},
complete: function (xhr, status) { complete: function (xhr, status) {
$("#recentlyAdded").html(xhr.responseText); $("#recentlyAdded").html(xhr.responseText);
$('#ajaxMsg').fadeOut(); $('#ajaxMsg').fadeOut();
highlightAddedScrollerButton(); highlightScrollerButton("#recently-added");
paginateScroller("#recently-added", ".paginate-added");
} }
}); });
} }
@ -955,57 +959,11 @@
recentlyAdded(recently_added_count, recently_added_type); recentlyAdded(recently_added_count, recently_added_type);
} }
function highlightAddedScrollerButton() {
var scroller = $("#recently-added-row-scroller");
var numElems = scroller.find("li:visible").length;
scroller.width(numElems * 175);
if (scroller.width() > $("body").find(".container-fluid").width()) {
$("#recently-added-page-right").removeClass("disabled");
} else {
$("#recently-added-page-right").addClass("disabled");
}
}
$(window).resize(function () {
highlightAddedScrollerButton();
});
function resetScroller() {
leftTotal = 0;
$("#recently-added-row-scroller").animate({ left: leftTotal }, 1000);
$("#recently-added-page-left").addClass("disabled").blur();
}
var leftTotal = 0;
$(".paginate").click(function (e) {
e.preventDefault();
var scroller = $("#recently-added-row-scroller");
var containerWidth = $("body").find(".container-fluid").width();
var scrollAmount = $(this).data("id") * parseInt((containerWidth - 15) / 175) * 175;
var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
scroller.animate({ left: leftTotal }, 250);
if (leftTotal === 0) {
$("#recently-added-page-left").addClass("disabled").blur();
} else {
$("#recently-added-page-left").removeClass("disabled");
}
if (leftTotal === leftMax) {
$("#recently-added-page-right").addClass("disabled").blur();
} else {
$("#recently-added-page-right").removeClass("disabled");
}
});
$('#recently-added-toggles').on('change', function () { $('#recently-added-toggles').on('change', function () {
$('#recently-added-toggles > label').removeClass('active'); $('#recently-added-toggles > label').removeClass('active');
selected_filter = $('input[name=recently-added-toggle]:checked', '#recently-added-toggles'); selected_filter = $('input[name=recently-added-toggle]:checked', '#recently-added-toggles');
$(selected_filter).closest('label').addClass('active'); $(selected_filter).closest('label').addClass('active');
recently_added_type = $(selected_filter).val(); recently_added_type = $(selected_filter).val();
resetScroller();
setLocalStorage('home_stats_recently_added_type', recently_added_type); setLocalStorage('home_stats_recently_added_type', recently_added_type);
recentlyAdded(recently_added_count, recently_added_type); recentlyAdded(recently_added_count, recently_added_type);
}); });
@ -1013,7 +971,6 @@
$('#recently-added-count').change(function () { $('#recently-added-count').change(function () {
forceMinMax($(this)); forceMinMax($(this));
recently_added_count = $(this).val(); recently_added_count = $(this).val();
resetScroller();
setLocalStorage('home_stats_recently_added_count', recently_added_count); setLocalStorage('home_stats_recently_added_count', recently_added_count);
recentlyAdded(recently_added_count, recently_added_type); recentlyAdded(recently_added_count, recently_added_type);
}); });


@ -360,7 +360,8 @@ function humanDuration(ms, sig='dhm', units='ms', return_seconds=300000) {
sig = 'dhms' sig = 'dhms'
} }
ms = ms * factors[units]; r = factors[sig.slice(-1)];
ms = Math.round(ms * factors[units] / r) * r;
h = ms % factors['d']; h = ms % factors['d'];
d = Math.trunc(ms / factors['d']); d = Math.trunc(ms / factors['d']);
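For reference, the rounding step introduced above expressed as a small Python sketch; the unit factors are assumed to mirror the JS `factors` table (standard milliseconds per unit), and the function name is illustrative:

```python
# Milliseconds per unit -- assumed to match the JS `factors` table.
FACTORS = {'ms': 1, 's': 1_000, 'm': 60_000, 'h': 3_600_000, 'd': 86_400_000}

def round_to_significant(value, units='ms', sig='dhm'):
    """Round a duration to the smallest unit that will be displayed (the
    last character of `sig`), so a runtime just short of a whole unit is
    reported as that unit instead of being truncated."""
    ms = value * FACTORS[units]
    r = FACTORS[sig[-1]]  # smallest displayed unit, e.g. 'm' -> 60000 ms
    return round(ms / r) * r

print(round_to_significant(7170, units='s', sig='dhm'))  # 7200000 ms = exactly 2 hours
```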
@ -929,3 +930,50 @@ $('.modal').on('hide.bs.modal', function (e) {
$.fn.hasScrollBar = function() { $.fn.hasScrollBar = function() {
return this.get(0).scrollHeight > this.get(0).clientHeight; return this.get(0).scrollHeight > this.get(0).clientHeight;
} }
function paginateScroller(scrollerId, buttonClass) {
$(buttonClass).click(function (e) {
e.preventDefault();
var scroller = $(scrollerId + "-row-scroller");
var scrollerParent = scroller.parent();
var containerWidth = scrollerParent.width();
var scrollCurrent = scrollerParent.scrollLeft();
var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
var scrollMax = scroller.width() - Math.abs(scrollAmount);
var scrollTotal = Math.min(parseInt(scrollCurrent / 175) * 175 + scrollAmount, scrollMax);
scrollerParent.animate({ scrollLeft: scrollTotal }, 250);
});
}
function highlightScrollerButton(scrollerId) {
var scroller = $(scrollerId + "-row-scroller");
var scrollerParent = scroller.parent();
var buttonLeft = $(scrollerId + "-page-left");
var buttonRight = $(scrollerId + "-page-right");
var numElems = scroller.find("li").length;
scroller.width(numElems * 175);
$(buttonLeft).addClass("disabled").blur();
if (scroller.width() > scrollerParent.width()) {
$(buttonRight).removeClass("disabled");
} else {
$(buttonRight).addClass("disabled");
}
scrollerParent.scroll(function () {
var scrollCurrent = $(this).scrollLeft();
var scrollMax = scroller.width() - $(this).width();
if (scrollCurrent == 0) {
$(buttonLeft).addClass("disabled").blur();
} else {
$(buttonLeft).removeClass("disabled");
}
if (scrollCurrent >= scrollMax) {
$(buttonRight).addClass("disabled").blur();
} else {
$(buttonRight).removeClass("disabled");
}
});
}
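The new `paginateScroller` helper above moves the scroll position by whole 175px tiles and clamps at the right-hand end of the row. The same arithmetic as a Python sketch (the 175px tile width comes from the JS; the function and argument names are illustrative):

```python
TILE = 175  # card width in px, as hard-coded in the JS above

def next_scroll_left(container_width, scroller_width, current_left, direction):
    """direction is +1 (page right) or -1 (page left).  Snap the current
    position to a whole tile, move by as many whole tiles as fit in the
    container, and clamp at the right-hand end; the browser clamps at 0."""
    step = direction * (container_width // TILE) * TILE
    scroll_max = scroller_width - abs(step)
    return min((current_left // TILE) * TILE + step, scroll_max)

print(next_scroll_left(900, 3500, 0, +1))    # 875 -> advance five whole tiles
print(next_scroll_left(900, 3500, 875, +1))  # 1750
```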


@ -149,10 +149,10 @@ DOCUMENTATION :: END
<div class="table-card-header"> <div class="table-card-header">
<ul class="nav nav-header nav-dashboard pull-right"> <ul class="nav nav-header nav-dashboard pull-right">
<li> <li>
<a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a> <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
</li> </li>
<li> <li>
<a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a> <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
</li> </li>
</ul> </ul>
<div class="header-bar"> <div class="header-bar">
@ -175,10 +175,10 @@ DOCUMENTATION :: END
<div class="table-card-header"> <div class="table-card-header">
<ul class="nav nav-header nav-dashboard pull-right"> <ul class="nav nav-header nav-dashboard pull-right">
<li> <li>
<a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a> <a href="#" id="recently-added-page-left" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
</li> </li>
<li> <li>
<a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a> <a href="#" id="recently-added-page-right" class="paginate-added btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
</li> </li>
</ul> </ul>
<div class="header-bar"> <div class="header-bar">
@ -690,7 +690,8 @@ DOCUMENTATION :: END
}, },
complete: function(xhr, status) { complete: function(xhr, status) {
$("#library-recently-watched").html(xhr.responseText); $("#library-recently-watched").html(xhr.responseText);
highlightWatchedScrollerButton(); highlightScrollerButton("#recently-watched");
paginateScroller("#recently-watched", ".paginate-watched");
} }
}); });
} }
@ -706,7 +707,8 @@ DOCUMENTATION :: END
}, },
complete: function(xhr, status) { complete: function(xhr, status) {
$("#library-recently-added").html(xhr.responseText); $("#library-recently-added").html(xhr.responseText);
highlightAddedScrollerButton(); highlightScrollerButton("#recently-added");
paginateScroller("#recently-added", ".paginate-added");
} }
}); });
} }
@ -716,83 +718,8 @@ DOCUMENTATION :: END
recentlyAdded(); recentlyAdded();
% endif % endif
function highlightWatchedScrollerButton() {
var scroller = $("#recently-watched-row-scroller");
var numElems = scroller.find("li").length;
scroller.width(numElems * 175);
if (scroller.width() > $("#library-recently-watched").width()) {
$("#recently-watched-page-right").removeClass("disabled");
} else {
$("#recently-watched-page-right").addClass("disabled");
}
}
function highlightAddedScrollerButton() {
var scroller = $("#recently-added-row-scroller");
var numElems = scroller.find("li").length;
scroller.width(numElems * 175);
if (scroller.width() > $("#library-recently-added").width()) {
$("#recently-added-page-right").removeClass("disabled");
} else {
$("#recently-added-page-right").addClass("disabled");
}
}
$(window).resize(function() {
highlightWatchedScrollerButton();
highlightAddedScrollerButton();
});
$('div.art-face').animate({ opacity: 0.2 }, { duration: 1000 }); $('div.art-face').animate({ opacity: 0.2 }, { duration: 1000 });
var leftTotalWatched = 0;
$(".paginate-watched").click(function (e) {
e.preventDefault();
var scroller = $("#recently-watched-row-scroller");
var containerWidth = $("#library-recently-watched").width();
var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
leftTotalWatched = Math.max(Math.min(leftTotalWatched + scrollAmount, 0), leftMax);
scroller.animate({ left: leftTotalWatched }, 250);
if (leftTotalWatched == 0) {
$("#recently-watched-page-left").addClass("disabled").blur();
} else {
$("#recently-watched-page-left").removeClass("disabled");
}
if (leftTotalWatched == leftMax) {
$("#recently-watched-page-right").addClass("disabled").blur();
} else {
$("#recently-watched-page-right").removeClass("disabled");
}
});
var leftTotalAdded = 0;
$(".paginate-added").click(function (e) {
e.preventDefault();
var scroller = $("#recently-added-row-scroller");
var containerWidth = $("#library-recently-added").width();
var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
leftTotalAdded = Math.max(Math.min(leftTotalAdded + scrollAmount, 0), leftMax);
scroller.animate({ left: leftTotalAdded }, 250);
if (leftTotalAdded == 0) {
$("#recently-added-page-left").addClass("disabled").blur();
} else {
$("#recently-added-page-left").removeClass("disabled");
}
if (leftTotalAdded == leftMax) {
$("#recently-added-page-right").addClass("disabled").blur();
} else {
$("#recently-added-page-right").removeClass("disabled");
}
});
$(document).ready(function () { $(document).ready(function () {
// Javascript to enable link to tab // Javascript to enable link to tab


@ -36,7 +36,7 @@ DOCUMENTATION :: END
%> %>
<div class="dashboard-recent-media-row"> <div class="dashboard-recent-media-row">
<div id="recently-added-row-scroller" style="left: 0;"> <div id="recently-added-row-scroller">
<ul class="dashboard-recent-media list-unstyled"> <ul class="dashboard-recent-media list-unstyled">
% for item in data: % for item in data:
<li> <li>


@ -50,7 +50,10 @@
</div> </div>
<p class="help-block"> <p class="help-block">
<span id="simple_cron_message">Set the schedule for the newsletter.</span> <span id="simple_cron_message">Set the schedule for the newsletter.</span>
<span id="custom_cron_message">Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>. Only standard cron values are valid.</span> <span id="custom_cron_message">
Set the schedule for the newsletter using a <a href="${anon_url('https://crontab.guru')}" target="_blank" rel="noreferrer">custom crontab</a>.
<a href="${anon_url('https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#expression-types')}" target="_blank" rel="noreferrer">Click here</a> for a list of supported expressions.
</span>
</p> </p>
</div> </div>
<div class="form-group"> <div class="form-group">
@ -481,7 +484,7 @@
}); });
if (${newsletter['config']['custom_cron']}) { if (${newsletter['config']['custom_cron']}) {
$('#cron_value').val('${newsletter['cron']}'); $('#cron_value').val('${newsletter['cron'] | n}');
} else { } else {
try { try {
cron_widget.cron('value', '${newsletter['cron']}'); cron_widget.cron('value', '${newsletter['cron']}');
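The scheduler now accepts APScheduler-compatible cron expressions rather than only standard crontab values. A minimal sketch of the difference, assuming APScheduler 3.x is importable; the expressions themselves are only illustrative:

```python
from apscheduler.triggers.cron import CronTrigger

# Standard five-field crontab strings still parse as before:
weekday_morning = CronTrigger.from_crontab('30 8 * * mon-fri')

# APScheduler's extended field expressions are also accepted, e.g. firing
# on the last day of every month -- something plain cron cannot express:
end_of_month = CronTrigger(day='last', hour=8, minute=30)

print(weekday_morning)
print(end_of_month)
```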


@ -36,7 +36,7 @@ DOCUMENTATION :: END
%> %>
% if data: % if data:
<div class="dashboard-recent-media-row"> <div class="dashboard-recent-media-row">
<div id="recently-added-row-scroller" style="left: 0;"> <div id="recently-added-row-scroller">
<ul class="dashboard-recent-media list-unstyled"> <ul class="dashboard-recent-media list-unstyled">
% for item in data: % for item in data:
<div class="dashboard-recent-media-instance"> <div class="dashboard-recent-media-instance">


@ -125,10 +125,10 @@ DOCUMENTATION :: END
<div class="table-card-header"> <div class="table-card-header">
<ul class="nav nav-header nav-dashboard pull-right"> <ul class="nav nav-header nav-dashboard pull-right">
<li> <li>
<a href="#" id="recently-watched-page-left" class="paginate btn-gray disabled" data-id="+1"><i class="fa fa-lg fa-chevron-left"></i></a> <a href="#" id="recently-watched-page-left" class="paginate-watched btn-gray disabled" data-id="-1"><i class="fa fa-lg fa-chevron-left"></i></a>
</li> </li>
<li> <li>
<a href="#" id="recently-watched-page-right" class="paginate btn-gray" data-id="-1"><i class="fa fa-lg fa-chevron-right"></i></a> <a href="#" id="recently-watched-page-right" class="paginate-watched btn-gray" data-id="+1"><i class="fa fa-lg fa-chevron-right"></i></a>
</li> </li>
</ul> </ul>
<div class="header-bar"> <div class="header-bar">
@ -666,52 +666,14 @@ DOCUMENTATION :: END
}, },
complete: function(xhr, status) { complete: function(xhr, status) {
$("#user-recently-watched").html(xhr.responseText); $("#user-recently-watched").html(xhr.responseText);
highlightWatchedScrollerButton(); highlightScrollerButton("#recently-watched");
paginateScroller("#recently-watched", ".paginate-watched");
} }
}); });
} }
recentlyWatched(); recentlyWatched();
function highlightWatchedScrollerButton() {
var scroller = $("#recently-watched-row-scroller");
var numElems = scroller.find("li").length;
scroller.width(numElems * 175);
if (scroller.width() > $("#user-recently-watched").width()) {
$("#recently-watched-page-right").removeClass("disabled");
} else {
$("#recently-watched-page-right").addClass("disabled");
}
}
$(window).resize(function() {
highlightWatchedScrollerButton();
});
var leftTotal = 0;
$(".paginate").click(function (e) {
e.preventDefault();
var scroller = $("#recently-watched-row-scroller");
var containerWidth = $("#user-recently-watched").width();
var scrollAmount = $(this).data("id") * parseInt(containerWidth / 175) * 175;
var leftMax = Math.min(-parseInt(scroller.width()) + Math.abs(scrollAmount), 0);
leftTotal = Math.max(Math.min(leftTotal + scrollAmount, 0), leftMax);
scroller.animate({ left: leftTotal }, 250);
if (leftTotal == 0) {
$("#recently-watched-page-left").addClass("disabled").blur();
} else {
$("#recently-watched-page-left").removeClass("disabled");
}
if (leftTotal == leftMax) {
$("#recently-watched-page-right").addClass("disabled").blur();
} else {
$("#recently-watched-page-right").removeClass("disabled");
}
});
$(document).ready(function () { $(document).ready(function () {
// Javascript to enable link to tab // Javascript to enable link to tab
var hash = document.location.hash; var hash = document.location.hash;


@ -31,7 +31,7 @@ DOCUMENTATION :: END
from plexpy.helpers import page, short_season from plexpy.helpers import page, short_season
%> %>
<div class="dashboard-recent-media-row"> <div class="dashboard-recent-media-row">
<div id="recently-watched-row-scroller" style="left: 0;"> <div id="recently-watched-row-scroller">
<ul class="dashboard-recent-media list-unstyled"> <ul class="dashboard-recent-media list-unstyled">
% for item in data: % for item in data:
<li> <li>


@ -1,4 +1,4 @@
from .core import contents, where from .core import contents, where
__all__ = ["contents", "where"] __all__ = ["contents", "where"]
__version__ = "2024.07.04" __version__ = "2024.08.30"


@ -4796,3 +4796,134 @@ PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw
hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG
XSaQpYXFuXqUPoeovQA= XSaQpYXFuXqUPoeovQA=
-----END CERTIFICATE----- -----END CERTIFICATE-----
# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
# Label: "TWCA CYBER Root CA"
# Serial: 85076849864375384482682434040119489222
# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51
# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66
# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58
-----BEGIN CERTIFICATE-----
MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ
MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290
IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5
WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO
LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg
Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P
40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF
avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/
34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i
JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu
j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf
Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP
2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA
S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA
oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC
kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW
5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD
VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd
BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB
AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t
tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn
68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn
TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t
RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx
f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI
Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz
8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4
NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX
xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6
t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X
-----END CERTIFICATE-----
# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
# Label: "SecureSign Root CA12"
# Serial: 587887345431707215246142177076162061960426065942
# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8
# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4
# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e
-----BEGIN CERTIFICATE-----
MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL
BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw
NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF
KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt
p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd
J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur
FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J
hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K
h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF
AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld
mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ
mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA
8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV
55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/
yOPiZwud9AzqVN/Ssq+xIvEg37xEHA==
-----END CERTIFICATE-----
# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
# Label: "SecureSign Root CA14"
# Serial: 575790784512929437950770173562378038616896959179
# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5
# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f
# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38
-----BEGIN CERTIFICATE-----
MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM
BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw
NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/
FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg
vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy
6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo
/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J
kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ
0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib
y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac
18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs
0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB
SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL
ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk
86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E
rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib
ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT
zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS
DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4
2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo
FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy
K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6
dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl
Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB
365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c
JRNItX+S
-----END CERTIFICATE-----
# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
# Label: "SecureSign Root CA15"
# Serial: 126083514594751269499665114766174399806381178503
# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47
# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d
# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a
-----BEGIN CERTIFICATE-----
MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw
UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM
dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy
NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl
cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290
IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4
wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR
ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB
Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT
9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp
4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6
bkU6iYAZezKYVWOr62Nuk22rGwlgMU4=
-----END CERTIFICATE-----


@ -1,4 +1,11 @@
"""Read resources contained within a package.""" """
Read resources contained within a package.
This codebase is shared between importlib.resources in the stdlib
and importlib_resources in PyPI. See
https://github.com/python/importlib_metadata/wiki/Development-Methodology
for more detail.
"""
from ._common import ( from ._common import (
as_file, as_file,
@ -7,7 +14,7 @@ from ._common import (
Anchor, Anchor,
) )
from .functional import ( from ._functional import (
contents, contents,
is_resource, is_resource,
open_binary, open_binary,


@ -66,10 +66,10 @@ def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
# zipimport.zipimporter does not support weak references, resulting in a # zipimport.zipimporter does not support weak references, resulting in a
# TypeError. That seems terrible. # TypeError. That seems terrible.
spec = package.__spec__ spec = package.__spec__
reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore[union-attr]
if reader is None: if reader is None:
return None return None
return reader(spec.name) # type: ignore return reader(spec.name) # type: ignore[union-attr]
@functools.singledispatch @functools.singledispatch
@ -93,12 +93,13 @@ def _infer_caller():
""" """
def is_this_file(frame_info): def is_this_file(frame_info):
return frame_info.filename == __file__ return frame_info.filename == stack[0].filename
def is_wrapper(frame_info): def is_wrapper(frame_info):
return frame_info.function == 'wrapper' return frame_info.function == 'wrapper'
not_this_file = itertools.filterfalse(is_this_file, inspect.stack()) stack = inspect.stack()
not_this_file = itertools.filterfalse(is_this_file, stack)
# also exclude 'wrapper' due to singledispatch in the call stack # also exclude 'wrapper' due to singledispatch in the call stack
callers = itertools.filterfalse(is_wrapper, not_this_file) callers = itertools.filterfalse(is_wrapper, not_this_file)
return next(callers).frame return next(callers).frame
@ -182,7 +183,7 @@ def _(path):
@contextlib.contextmanager @contextlib.contextmanager
def _temp_path(dir: tempfile.TemporaryDirectory): def _temp_path(dir: tempfile.TemporaryDirectory):
""" """
Wrap tempfile.TemporyDirectory to return a pathlib object. Wrap tempfile.TemporaryDirectory to return a pathlib object.
""" """
with dir as result: with dir as result:
yield pathlib.Path(result) yield pathlib.Path(result)
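The `_infer_caller` change above compares stack frames against `stack[0].filename` instead of `__file__`, so caller detection still works when the package is distributed as compiled `.pyc` files only (python/cpython#123085, exercised by the new test further down). A self-contained sketch of that frame-walking idea, with illustrative names:

```python
import inspect

def calling_module_file():
    """Return the filename of the first stack frame outside this module.
    Using stack[0].filename as the reference (rather than __file__) keeps
    the comparison valid for compiled-only distributions, where the two
    can differ."""
    stack = inspect.stack()
    this_file = stack[0].filename
    for frame_info in stack:
        if frame_info.filename != this_file:
            return frame_info.filename
    # Called from the defining module itself: no outside frame was found.
    return None
```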


@ -5,6 +5,6 @@ __all__ = ['ZipPath']
if sys.version_info >= (3, 10): if sys.version_info >= (3, 10):
from zipfile import Path as ZipPath # type: ignore from zipfile import Path as ZipPath
else: else:
from zipp import Path as ZipPath # type: ignore from zipp import Path as ZipPath


@ -1,3 +1,5 @@
from __future__ import annotations
import collections import collections
import contextlib import contextlib
import itertools import itertools
@ -5,6 +7,7 @@ import pathlib
import operator import operator
import re import re
import warnings import warnings
from collections.abc import Iterator
from . import abc from . import abc
@ -34,8 +37,10 @@ class FileReader(abc.TraversableResources):
class ZipReader(abc.TraversableResources): class ZipReader(abc.TraversableResources):
def __init__(self, loader, module): def __init__(self, loader, module):
_, _, name = module.rpartition('.') self.prefix = loader.prefix.replace('\\', '/')
self.prefix = loader.prefix.replace('\\', '/') + name + '/' if loader.is_package(module):
_, _, name = module.rpartition('.')
self.prefix += name + '/'
self.archive = loader.archive self.archive = loader.archive
def open_resource(self, resource): def open_resource(self, resource):
@ -133,27 +138,31 @@ class NamespaceReader(abc.TraversableResources):
def __init__(self, namespace_path): def __init__(self, namespace_path):
if 'NamespacePath' not in str(namespace_path): if 'NamespacePath' not in str(namespace_path):
raise ValueError('Invalid path') raise ValueError('Invalid path')
self.path = MultiplexedPath(*map(self._resolve, namespace_path)) self.path = MultiplexedPath(*filter(bool, map(self._resolve, namespace_path)))
@classmethod @classmethod
def _resolve(cls, path_str) -> abc.Traversable: def _resolve(cls, path_str) -> abc.Traversable | None:
r""" r"""
Given an item from a namespace path, resolve it to a Traversable. Given an item from a namespace path, resolve it to a Traversable.
path_str might be a directory on the filesystem or a path to a path_str might be a directory on the filesystem or a path to a
zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or
``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``. ``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``.
path_str might also be a sentinel used by editable packages to
trigger other behaviors (see python/importlib_resources#311).
In that case, return None.
""" """
(dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir()) dirs = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir())
return dir return next(dirs, None)
@classmethod @classmethod
def _candidate_paths(cls, path_str): def _candidate_paths(cls, path_str: str) -> Iterator[abc.Traversable]:
yield pathlib.Path(path_str) yield pathlib.Path(path_str)
yield from cls._resolve_zip_path(path_str) yield from cls._resolve_zip_path(path_str)
@staticmethod @staticmethod
def _resolve_zip_path(path_str): def _resolve_zip_path(path_str: str):
for match in reversed(list(re.finditer(r'[\\/]', path_str))): for match in reversed(list(re.finditer(r'[\\/]', path_str))):
with contextlib.suppress( with contextlib.suppress(
FileNotFoundError, FileNotFoundError,
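`_resolve` above now returns `None` for namespace `__path__` entries that are not real paths, such as the `__editable__.sample_namespace-1.0.finder.__path_hook__` sentinels injected by editable installs (python/importlib_resources#311), and the reader filters those out. A reduced sketch of that filtering (directory-only, no zip handling); the concrete entries are illustrative:

```python
import pathlib

def usable_namespace_dirs(path_entries):
    """Yield only entries that resolve to real directories; editable-install
    sentinel strings simply resolve to nothing and are skipped."""
    for entry in path_entries:
        candidate = pathlib.Path(entry)
        if candidate.is_dir():
            yield candidate

entries = [
    '/usr/lib/python3/site-packages/sample_namespace',         # real directory (illustrative)
    '__editable__.sample_namespace-1.0.finder.__path_hook__',  # sentinel, skipped
]
print(list(usable_namespace_dirs(entries)))
```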


@ -77,7 +77,7 @@ class ResourceHandle(Traversable):
def __init__(self, parent: ResourceContainer, name: str): def __init__(self, parent: ResourceContainer, name: str):
self.parent = parent self.parent = parent
self.name = name # type: ignore self.name = name # type: ignore[misc]
def is_file(self): def is_file(self):
return True return True


@ -2,15 +2,44 @@ import pathlib
import functools import functools
from typing import Dict, Union from typing import Dict, Union
from typing import runtime_checkable
from typing import Protocol
#### ####
# from jaraco.path 3.4.1 # from jaraco.path 3.7.1
FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']] # type: ignore
def build(spec: FilesSpec, prefix=pathlib.Path()): class Symlink(str):
"""
A string indicating the target of a symlink.
"""
FilesSpec = Dict[str, Union[str, bytes, Symlink, 'FilesSpec']]
@runtime_checkable
class TreeMaker(Protocol):
def __truediv__(self, *args, **kwargs): ... # pragma: no cover
def mkdir(self, **kwargs): ... # pragma: no cover
def write_text(self, content, **kwargs): ... # pragma: no cover
def write_bytes(self, content): ... # pragma: no cover
def symlink_to(self, target): ... # pragma: no cover
def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker:
return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) # type: ignore[return-value]
def build(
spec: FilesSpec,
prefix: Union[str, TreeMaker] = pathlib.Path(), # type: ignore[assignment]
):
""" """
Build a set of files/directories, as described by the spec. Build a set of files/directories, as described by the spec.
@ -25,21 +54,25 @@ def build(spec: FilesSpec, prefix=pathlib.Path()):
... "__init__.py": "", ... "__init__.py": "",
... }, ... },
... "baz.py": "# Some code", ... "baz.py": "# Some code",
... } ... "bar.py": Symlink("baz.py"),
... },
... "bing": Symlink("foo"),
... } ... }
>>> target = getfixture('tmp_path') >>> target = getfixture('tmp_path')
>>> build(spec, target) >>> build(spec, target)
>>> target.joinpath('foo/baz.py').read_text(encoding='utf-8') >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
'# Some code' '# Some code'
>>> target.joinpath('bing/bar.py').read_text(encoding='utf-8')
'# Some code'
""" """
for name, contents in spec.items(): for name, contents in spec.items():
create(contents, pathlib.Path(prefix) / name) create(contents, _ensure_tree_maker(prefix) / name)
@functools.singledispatch @functools.singledispatch
def create(content: Union[str, bytes, FilesSpec], path): def create(content: Union[str, bytes, FilesSpec], path):
path.mkdir(exist_ok=True) path.mkdir(exist_ok=True)
build(content, prefix=path) # type: ignore build(content, prefix=path) # type: ignore[arg-type]
@create.register @create.register
@ -52,5 +85,10 @@ def _(content: str, path):
path.write_text(content, encoding='utf-8') path.write_text(content, encoding='utf-8')
@create.register
def _(content: Symlink, path):
path.symlink_to(content)
# end from jaraco.path # end from jaraco.path
#### ####


@ -8,3 +8,6 @@ import_helper = try_import('import_helper') or from_test_support(
'modules_setup', 'modules_cleanup', 'DirsOnSysPath' 'modules_setup', 'modules_cleanup', 'DirsOnSysPath'
) )
os_helper = try_import('os_helper') or from_test_support('temp_dir') os_helper = try_import('os_helper') or from_test_support('temp_dir')
warnings_helper = try_import('warnings_helper') or from_test_support(
'ignore_warnings', 'check_warnings'
)


@ -1 +0,0 @@
Hello, UTF-8 world!


@ -1 +0,0 @@
one resource


@ -1 +0,0 @@
two resource


@ -1 +0,0 @@
Hello, UTF-8 world!


@ -1,7 +1,6 @@
import unittest import unittest
import importlib_resources as resources import importlib_resources as resources
from . import data01
from . import util from . import util
@ -19,16 +18,17 @@ class ContentsTests:
assert self.expected <= contents assert self.expected <= contents
class ContentsDiskTests(ContentsTests, unittest.TestCase): class ContentsDiskTests(ContentsTests, util.DiskSetup, unittest.TestCase):
def setUp(self): pass
self.data = data01
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase): class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
pass pass
class ContentsNamespaceTests(ContentsTests, unittest.TestCase): class ContentsNamespaceTests(ContentsTests, util.DiskSetup, unittest.TestCase):
MODULE = 'namespacedata01'
expected = { expected = {
# no __init__ because of namespace design # no __init__ because of namespace design
'binary.file', 'binary.file',
@ -36,8 +36,3 @@ class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
'utf-16.file', 'utf-16.file',
'utf-8.file', 'utf-8.file',
} }
def setUp(self):
from . import namespacedata01
self.data = namespacedata01


@ -1,3 +1,7 @@
import os
import pathlib
import py_compile
import shutil
import textwrap import textwrap
import unittest import unittest
import warnings import warnings
@ -6,11 +10,8 @@ import contextlib
import importlib_resources as resources import importlib_resources as resources
from ..abc import Traversable from ..abc import Traversable
from . import data01
from . import util from . import util
from . import _path from .compat.py39 import os_helper, import_helper
from .compat.py39 import os_helper
from .compat.py312 import import_helper
@contextlib.contextmanager @contextlib.contextmanager
@ -48,70 +49,146 @@ class FilesTests:
resources.files(package=self.data) resources.files(package=self.data)
class OpenDiskTests(FilesTests, unittest.TestCase): class OpenDiskTests(FilesTests, util.DiskSetup, unittest.TestCase):
def setUp(self): pass
self.data = data01
class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
pass pass
class OpenNamespaceTests(FilesTests, unittest.TestCase): class OpenNamespaceTests(FilesTests, util.DiskSetup, unittest.TestCase):
def setUp(self): MODULE = 'namespacedata01'
from . import namespacedata01
self.data = namespacedata01 def test_non_paths_in_dunder_path(self):
"""
Non-path items in a namespace package's ``__path__`` are ignored.
As reported in python/importlib_resources#311, some tools
like Setuptools, when creating editable packages, will inject
non-paths into a namespace package's ``__path__``, a
sentinel like
``__editable__.sample_namespace-1.0.finder.__path_hook__``
to cause the ``PathEntryFinder`` to be called when searching
for packages. In that case, resources should still be loadable.
"""
import namespacedata01
namespacedata01.__path__.append(
'__editable__.sample_namespace-1.0.finder.__path_hook__'
)
resources.files(namespacedata01)
class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase): class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
ZIP_MODULE = 'namespacedata01' ZIP_MODULE = 'namespacedata01'
class SiteDir: class DirectSpec:
def setUp(self): """
self.fixtures = contextlib.ExitStack() Override behavior of ModuleSetup to write a full spec directly.
self.addCleanup(self.fixtures.close) """
self.site_dir = self.fixtures.enter_context(os_helper.temp_dir())
self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir)) MODULE = 'unused'
self.fixtures.enter_context(import_helper.isolated_modules())
def load_fixture(self, name):
self.tree_on_path(self.spec)
class ModulesFilesTests(SiteDir, unittest.TestCase): class ModulesFiles:
spec = {
'mod.py': '',
'res.txt': 'resources are the best',
}
def test_module_resources(self): def test_module_resources(self):
""" """
A module can have resources found adjacent to the module. A module can have resources found adjacent to the module.
""" """
spec = { import mod # type: ignore[import-not-found]
'mod.py': '',
'res.txt': 'resources are the best',
}
_path.build(spec, self.site_dir)
import mod
actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8') actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8')
assert actual == spec['res.txt'] assert actual == self.spec['res.txt']
class ImplicitContextFilesTests(SiteDir, unittest.TestCase): class ModuleFilesDiskTests(DirectSpec, util.DiskSetup, ModulesFiles, unittest.TestCase):
def test_implicit_files(self): pass
class ModuleFilesZipTests(DirectSpec, util.ZipSetup, ModulesFiles, unittest.TestCase):
pass
class ImplicitContextFiles:
set_val = textwrap.dedent(
f"""
import {resources.__name__} as res
val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
"""
)
spec = {
'somepkg': {
'__init__.py': set_val,
'submod.py': set_val,
'res.txt': 'resources are the best',
},
'frozenpkg': {
'__init__.py': set_val.replace(resources.__name__, 'c_resources'),
'res.txt': 'resources are the best',
},
}
def test_implicit_files_package(self):
""" """
Without any parameter, files() will infer the location as the caller. Without any parameter, files() will infer the location as the caller.
""" """
spec = {
'somepkg': {
'__init__.py': textwrap.dedent(
"""
import importlib_resources as res
val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
"""
),
'res.txt': 'resources are the best',
},
}
_path.build(spec, self.site_dir)
assert importlib.import_module('somepkg').val == 'resources are the best' assert importlib.import_module('somepkg').val == 'resources are the best'
def test_implicit_files_submodule(self):
"""
Without any parameter, files() will infer the location as the caller.
"""
assert importlib.import_module('somepkg.submod').val == 'resources are the best'
def _compile_importlib(self):
"""
Make a compiled-only copy of the importlib resources package.
"""
bin_site = self.fixtures.enter_context(os_helper.temp_dir())
c_resources = pathlib.Path(bin_site, 'c_resources')
sources = pathlib.Path(resources.__file__).parent
shutil.copytree(sources, c_resources, ignore=lambda *_: ['__pycache__'])
for dirpath, _, filenames in os.walk(c_resources):
for filename in filenames:
source_path = pathlib.Path(dirpath) / filename
cfile = source_path.with_suffix('.pyc')
py_compile.compile(source_path, cfile)
pathlib.Path.unlink(source_path)
self.fixtures.enter_context(import_helper.DirsOnSysPath(bin_site))
def test_implicit_files_with_compiled_importlib(self):
"""
Caller detection works for compiled-only resources module.
python/cpython#123085
"""
self._compile_importlib()
assert importlib.import_module('frozenpkg').val == 'resources are the best'
class ImplicitContextFilesDiskTests(
DirectSpec, util.DiskSetup, ImplicitContextFiles, unittest.TestCase
):
pass
class ImplicitContextFilesZipTests(
DirectSpec, util.ZipSetup, ImplicitContextFiles, unittest.TestCase
):
pass
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()
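The reworked tests above drive `importlib_resources.files()` both with an explicit anchor and with no argument, in which case the calling package is inferred. A minimal sketch of the two call styles, assuming a package `somepkg` laid out like the test spec (an `__init__.py` next to `res.txt`):

```python
import importlib_resources as resources

# Explicit anchor: name the package that owns the resource.
text = resources.files('somepkg').joinpath('res.txt').read_text(encoding='utf-8')
print(text)

# Implicit anchor: inside somepkg/__init__.py (or somepkg/submod.py) the
# same lookup can omit the anchor and files() infers the calling package:
#     import importlib_resources as res
#     val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
```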


@ -1,31 +1,38 @@
import unittest import unittest
import os import os
import contextlib import importlib
try: from .compat.py39 import warnings_helper
from test.support.warnings_helper import ignore_warnings, check_warnings
except ImportError:
# older Python versions
from test.support import ignore_warnings, check_warnings
import importlib_resources as resources import importlib_resources as resources
from . import util
# Since the functional API forwards to Traversable, we only test # Since the functional API forwards to Traversable, we only test
# filesystem resources here -- not zip files, namespace packages etc. # filesystem resources here -- not zip files, namespace packages etc.
# We do test for two kinds of Anchor, though. # We do test for two kinds of Anchor, though.
class StringAnchorMixin: class StringAnchorMixin:
anchor01 = 'importlib_resources.tests.data01' anchor01 = 'data01'
anchor02 = 'importlib_resources.tests.data02' anchor02 = 'data02'
class ModuleAnchorMixin: class ModuleAnchorMixin:
from . import data01 as anchor01 @property
from . import data02 as anchor02 def anchor01(self):
return importlib.import_module('data01')
@property
def anchor02(self):
return importlib.import_module('data02')
class FunctionalAPIBase: class FunctionalAPIBase(util.DiskSetup):
def setUp(self):
super().setUp()
self.load_fixture('data02')
def _gen_resourcetxt_path_parts(self): def _gen_resourcetxt_path_parts(self):
"""Yield various names of a text file in anchor02, each in a subTest""" """Yield various names of a text file in anchor02, each in a subTest"""
for path_parts in ( for path_parts in (
@ -36,6 +43,12 @@ class FunctionalAPIBase:
with self.subTest(path_parts=path_parts): with self.subTest(path_parts=path_parts):
yield path_parts yield path_parts
def assertEndsWith(self, string, suffix):
"""Assert that `string` ends with `suffix`.
Used to ignore an architecture-specific UTF-16 byte-order mark."""
self.assertEqual(string[-len(suffix) :], suffix)
def test_read_text(self): def test_read_text(self):
self.assertEqual( self.assertEqual(
resources.read_text(self.anchor01, 'utf-8.file'), resources.read_text(self.anchor01, 'utf-8.file'),
@ -76,13 +89,13 @@ class FunctionalAPIBase:
), ),
'\x00\x01\x02\x03', '\x00\x01\x02\x03',
) )
self.assertEqual( self.assertEndsWith( # ignore the BOM
resources.read_text( resources.read_text(
self.anchor01, self.anchor01,
'utf-16.file', 'utf-16.file',
errors='backslashreplace', errors='backslashreplace',
), ),
'Hello, UTF-16 world!\n'.encode('utf-16').decode( 'Hello, UTF-16 world!\n'.encode('utf-16-le').decode(
errors='backslashreplace', errors='backslashreplace',
), ),
) )
@ -128,9 +141,9 @@ class FunctionalAPIBase:
'utf-16.file', 'utf-16.file',
errors='backslashreplace', errors='backslashreplace',
) as f: ) as f:
self.assertEqual( self.assertEndsWith( # ignore the BOM
f.read(), f.read(),
'Hello, UTF-16 world!\n'.encode('utf-16').decode( 'Hello, UTF-16 world!\n'.encode('utf-16-le').decode(
errors='backslashreplace', errors='backslashreplace',
), ),
) )
@ -163,32 +176,32 @@ class FunctionalAPIBase:
self.assertTrue(is_resource(self.anchor02, *path_parts)) self.assertTrue(is_resource(self.anchor02, *path_parts))
def test_contents(self): def test_contents(self):
with check_warnings((".*contents.*", DeprecationWarning)): with warnings_helper.check_warnings((".*contents.*", DeprecationWarning)):
c = resources.contents(self.anchor01) c = resources.contents(self.anchor01)
self.assertGreaterEqual( self.assertGreaterEqual(
set(c), set(c),
{'utf-8.file', 'utf-16.file', 'binary.file', 'subdirectory'}, {'utf-8.file', 'utf-16.file', 'binary.file', 'subdirectory'},
) )
with contextlib.ExitStack() as cm: with self.assertRaises(OSError), warnings_helper.check_warnings((
cm.enter_context(self.assertRaises(OSError)) ".*contents.*",
cm.enter_context(check_warnings((".*contents.*", DeprecationWarning))) DeprecationWarning,
)):
list(resources.contents(self.anchor01, 'utf-8.file')) list(resources.contents(self.anchor01, 'utf-8.file'))
for path_parts in self._gen_resourcetxt_path_parts(): for path_parts in self._gen_resourcetxt_path_parts():
with contextlib.ExitStack() as cm: with self.assertRaises(OSError), warnings_helper.check_warnings((
cm.enter_context(self.assertRaises(OSError)) ".*contents.*",
cm.enter_context(check_warnings((".*contents.*", DeprecationWarning))) DeprecationWarning,
)):
list(resources.contents(self.anchor01, *path_parts)) list(resources.contents(self.anchor01, *path_parts))
with check_warnings((".*contents.*", DeprecationWarning)): with warnings_helper.check_warnings((".*contents.*", DeprecationWarning)):
c = resources.contents(self.anchor01, 'subdirectory') c = resources.contents(self.anchor01, 'subdirectory')
self.assertGreaterEqual( self.assertGreaterEqual(
set(c), set(c),
{'binary.file'}, {'binary.file'},
) )
@ignore_warnings(category=DeprecationWarning) @warnings_helper.ignore_warnings(category=DeprecationWarning)
def test_common_errors(self): def test_common_errors(self):
for func in ( for func in (
resources.read_text, resources.read_text,
@ -227,16 +240,16 @@ class FunctionalAPIBase:
class FunctionalAPITest_StringAnchor( class FunctionalAPITest_StringAnchor(
unittest.TestCase,
FunctionalAPIBase,
StringAnchorMixin, StringAnchorMixin,
FunctionalAPIBase,
unittest.TestCase,
): ):
pass pass
class FunctionalAPITest_ModuleAnchor( class FunctionalAPITest_ModuleAnchor(
unittest.TestCase,
FunctionalAPIBase,
ModuleAnchorMixin, ModuleAnchorMixin,
FunctionalAPIBase,
unittest.TestCase,
): ):
pass pass


@ -1,7 +1,6 @@
import unittest import unittest
import importlib_resources as resources import importlib_resources as resources
from . import data01
from . import util from . import util
@ -65,16 +64,12 @@ class OpenTests:
target.open(encoding='utf-8') target.open(encoding='utf-8')
class OpenDiskTests(OpenTests, unittest.TestCase): class OpenDiskTests(OpenTests, util.DiskSetup, unittest.TestCase):
def setUp(self): pass
self.data = data01
class OpenDiskNamespaceTests(OpenTests, unittest.TestCase): class OpenDiskNamespaceTests(OpenTests, util.DiskSetup, unittest.TestCase):
def setUp(self): MODULE = 'namespacedata01'
from . import namespacedata01
self.data = namespacedata01
class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase): class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
@ -82,7 +77,7 @@ class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
class OpenNamespaceZipTests(OpenTests, util.ZipSetup, unittest.TestCase): class OpenNamespaceZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
ZIP_MODULE = 'namespacedata01' MODULE = 'namespacedata01'
if __name__ == '__main__': if __name__ == '__main__':


@ -3,7 +3,6 @@ import pathlib
import unittest import unittest
import importlib_resources as resources import importlib_resources as resources
from . import data01
from . import util from . import util
@ -25,9 +24,7 @@ class PathTests:
self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8')) self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8'))
class PathDiskTests(PathTests, unittest.TestCase): class PathDiskTests(PathTests, util.DiskSetup, unittest.TestCase):
data = data01
def test_natural_path(self): def test_natural_path(self):
""" """
Guarantee the internal implementation detail that Guarantee the internal implementation detail that


@ -1,7 +1,6 @@
import unittest import unittest
import importlib_resources as resources import importlib_resources as resources
from . import data01
from . import util from . import util
from importlib import import_module from importlib import import_module
@ -52,8 +51,8 @@ class ReadTests:
) )
class ReadDiskTests(ReadTests, unittest.TestCase): class ReadDiskTests(ReadTests, util.DiskSetup, unittest.TestCase):
data = data01 pass
class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase): class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
@ -69,15 +68,12 @@ class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
self.assertEqual(result, bytes(range(4, 8))) self.assertEqual(result, bytes(range(4, 8)))
class ReadNamespaceTests(ReadTests, unittest.TestCase): class ReadNamespaceTests(ReadTests, util.DiskSetup, unittest.TestCase):
def setUp(self): MODULE = 'namespacedata01'
from . import namespacedata01
self.data = namespacedata01
class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase): class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
ZIP_MODULE = 'namespacedata01' MODULE = 'namespacedata01'
def test_read_submodule_resource(self): def test_read_submodule_resource(self):
submodule = import_module('namespacedata01.subdirectory') submodule = import_module('namespacedata01.subdirectory')


@ -1,16 +1,21 @@
import os.path import os.path
import sys
import pathlib import pathlib
import unittest import unittest
from importlib import import_module from importlib import import_module
from importlib_resources.readers import MultiplexedPath, NamespaceReader from importlib_resources.readers import MultiplexedPath, NamespaceReader
from . import util
class MultiplexedPathTest(unittest.TestCase):
@classmethod class MultiplexedPathTest(util.DiskSetup, unittest.TestCase):
def setUpClass(cls): MODULE = 'namespacedata01'
cls.folder = pathlib.Path(__file__).parent / 'namespacedata01'
def setUp(self):
super().setUp()
self.folder = pathlib.Path(self.data.__path__[0])
self.data01 = pathlib.Path(self.load_fixture('data01').__file__).parent
self.data02 = pathlib.Path(self.load_fixture('data02').__file__).parent
def test_init_no_paths(self): def test_init_no_paths(self):
with self.assertRaises(FileNotFoundError): with self.assertRaises(FileNotFoundError):
@ -31,9 +36,8 @@ class MultiplexedPathTest(unittest.TestCase):
) )
def test_iterdir_duplicate(self): def test_iterdir_duplicate(self):
data01 = pathlib.Path(__file__).parent.joinpath('data01')
contents = { contents = {
path.name for path in MultiplexedPath(self.folder, data01).iterdir() path.name for path in MultiplexedPath(self.folder, self.data01).iterdir()
} }
for remove in ('__pycache__', '__init__.pyc'): for remove in ('__pycache__', '__init__.pyc'):
try: try:
@ -61,9 +65,8 @@ class MultiplexedPathTest(unittest.TestCase):
path.open() path.open()
def test_join_path(self): def test_join_path(self):
data01 = pathlib.Path(__file__).parent.joinpath('data01') prefix = str(self.folder.parent)
prefix = str(data01.parent) path = MultiplexedPath(self.folder, self.data01)
path = MultiplexedPath(self.folder, data01)
self.assertEqual( self.assertEqual(
str(path.joinpath('binary.file'))[len(prefix) + 1 :], str(path.joinpath('binary.file'))[len(prefix) + 1 :],
os.path.join('namespacedata01', 'binary.file'), os.path.join('namespacedata01', 'binary.file'),
@ -83,10 +86,8 @@ class MultiplexedPathTest(unittest.TestCase):
assert not path.joinpath('imaginary/foo.py').exists() assert not path.joinpath('imaginary/foo.py').exists()
def test_join_path_common_subdir(self): def test_join_path_common_subdir(self):
data01 = pathlib.Path(__file__).parent.joinpath('data01') prefix = str(self.data02.parent)
data02 = pathlib.Path(__file__).parent.joinpath('data02') path = MultiplexedPath(self.data01, self.data02)
prefix = str(data01.parent)
path = MultiplexedPath(data01, data02)
self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath) self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath)
self.assertEqual( self.assertEqual(
str(path.joinpath('subdirectory', 'subsubdir'))[len(prefix) + 1 :], str(path.joinpath('subdirectory', 'subsubdir'))[len(prefix) + 1 :],
@ -106,16 +107,8 @@ class MultiplexedPathTest(unittest.TestCase):
) )
class NamespaceReaderTest(unittest.TestCase): class NamespaceReaderTest(util.DiskSetup, unittest.TestCase):
site_dir = str(pathlib.Path(__file__).parent) MODULE = 'namespacedata01'
@classmethod
def setUpClass(cls):
sys.path.append(cls.site_dir)
@classmethod
def tearDownClass(cls):
sys.path.remove(cls.site_dir)
def test_init_error(self): def test_init_error(self):
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
@ -125,7 +118,7 @@ class NamespaceReaderTest(unittest.TestCase):
namespacedata01 = import_module('namespacedata01') namespacedata01 = import_module('namespacedata01')
reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) root = self.data.__path__[0]
self.assertEqual( self.assertEqual(
reader.resource_path('binary.file'), os.path.join(root, 'binary.file') reader.resource_path('binary.file'), os.path.join(root, 'binary.file')
) )
@ -134,9 +127,8 @@ class NamespaceReaderTest(unittest.TestCase):
) )
def test_files(self): def test_files(self):
namespacedata01 = import_module('namespacedata01') reader = NamespaceReader(self.data.__spec__.submodule_search_locations)
reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) root = self.data.__path__[0]
root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
self.assertIsInstance(reader.files(), MultiplexedPath) self.assertIsInstance(reader.files(), MultiplexedPath)
self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')") self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')")
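For context on what these tests exercise: MultiplexedPath presents several directories as a single traversable. A minimal sketch using temporary directories instead of the repository fixtures; the file names below are illustrative only.

import pathlib
import tempfile

from importlib_resources.readers import MultiplexedPath

# Two directories contributing children to one logical package path.
dir_a = pathlib.Path(tempfile.mkdtemp())
dir_b = pathlib.Path(tempfile.mkdtemp())
(dir_a / 'one.txt').write_text('from a')
(dir_b / 'two.txt').write_text('from b')

combined = MultiplexedPath(dir_a, dir_b)
print({child.name for child in combined.iterdir()})  # {'one.txt', 'two.txt'}
print(combined.joinpath('two.txt').read_text())      # from b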


@ -1,9 +1,6 @@
import sys
import unittest import unittest
import importlib_resources as resources import importlib_resources as resources
import pathlib
from . import data01
from . import util from . import util
from importlib import import_module from importlib import import_module
@ -25,9 +22,8 @@ class ResourceTests:
self.assertTrue(target.is_dir()) self.assertTrue(target.is_dir())
class ResourceDiskTests(ResourceTests, unittest.TestCase): class ResourceDiskTests(ResourceTests, util.DiskSetup, unittest.TestCase):
def setUp(self): pass
self.data = data01
class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase): class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase):
@ -38,33 +34,39 @@ def names(traversable):
return {item.name for item in traversable.iterdir()} return {item.name for item in traversable.iterdir()}
class ResourceLoaderTests(unittest.TestCase): class ResourceLoaderTests(util.DiskSetup, unittest.TestCase):
def test_resource_contents(self): def test_resource_contents(self):
package = util.create_package( package = util.create_package(
file=data01, path=data01.__file__, contents=['A', 'B', 'C'] file=self.data, path=self.data.__file__, contents=['A', 'B', 'C']
) )
self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'}) self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'})
def test_is_file(self): def test_is_file(self):
package = util.create_package( package = util.create_package(
file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] file=self.data,
path=self.data.__file__,
contents=['A', 'B', 'C', 'D/E', 'D/F'],
) )
self.assertTrue(resources.files(package).joinpath('B').is_file()) self.assertTrue(resources.files(package).joinpath('B').is_file())
def test_is_dir(self): def test_is_dir(self):
package = util.create_package( package = util.create_package(
file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] file=self.data,
path=self.data.__file__,
contents=['A', 'B', 'C', 'D/E', 'D/F'],
) )
self.assertTrue(resources.files(package).joinpath('D').is_dir()) self.assertTrue(resources.files(package).joinpath('D').is_dir())
def test_resource_missing(self): def test_resource_missing(self):
package = util.create_package( package = util.create_package(
file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] file=self.data,
path=self.data.__file__,
contents=['A', 'B', 'C', 'D/E', 'D/F'],
) )
self.assertFalse(resources.files(package).joinpath('Z').is_file()) self.assertFalse(resources.files(package).joinpath('Z').is_file())
class ResourceCornerCaseTests(unittest.TestCase): class ResourceCornerCaseTests(util.DiskSetup, unittest.TestCase):
def test_package_has_no_reader_fallback(self): def test_package_has_no_reader_fallback(self):
""" """
Test odd ball packages which: Test odd ball packages which:
@ -73,7 +75,7 @@ class ResourceCornerCaseTests(unittest.TestCase):
# 3. Are not in a zip file # 3. Are not in a zip file
""" """
module = util.create_package( module = util.create_package(
file=data01, path=data01.__file__, contents=['A', 'B', 'C'] file=self.data, path=self.data.__file__, contents=['A', 'B', 'C']
) )
# Give the module a dummy loader. # Give the module a dummy loader.
module.__loader__ = object() module.__loader__ = object()
@ -84,9 +86,7 @@ class ResourceCornerCaseTests(unittest.TestCase):
self.assertFalse(resources.files(module).joinpath('A').is_file()) self.assertFalse(resources.files(module).joinpath('A').is_file())
class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): class ResourceFromZipsTest01(util.ZipSetup, unittest.TestCase):
ZIP_MODULE = 'data01'
def test_is_submodule_resource(self): def test_is_submodule_resource(self):
submodule = import_module('data01.subdirectory') submodule = import_module('data01.subdirectory')
self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file()) self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())
@ -117,8 +117,8 @@ class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
assert not data.parent.exists() assert not data.parent.exists()
class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): class ResourceFromZipsTest02(util.ZipSetup, unittest.TestCase):
ZIP_MODULE = 'data02' MODULE = 'data02'
def test_unrelated_contents(self): def test_unrelated_contents(self):
""" """
@ -135,7 +135,7 @@ class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
) )
class DeletingZipsTest(util.ZipSetupBase, unittest.TestCase): class DeletingZipsTest(util.ZipSetup, unittest.TestCase):
"""Having accessed resources in a zip file should not keep an open """Having accessed resources in a zip file should not keep an open
reference to the zip. reference to the zip.
""" """
@ -217,24 +217,20 @@ class ResourceFromNamespaceTests:
self.assertEqual(contents, {'binary.file'}) self.assertEqual(contents, {'binary.file'})
class ResourceFromNamespaceDiskTests(ResourceFromNamespaceTests, unittest.TestCase): class ResourceFromNamespaceDiskTests(
site_dir = str(pathlib.Path(__file__).parent) util.DiskSetup,
@classmethod
def setUpClass(cls):
sys.path.append(cls.site_dir)
@classmethod
def tearDownClass(cls):
sys.path.remove(cls.site_dir)
class ResourceFromNamespaceZipTests(
util.ZipSetupBase,
ResourceFromNamespaceTests, ResourceFromNamespaceTests,
unittest.TestCase, unittest.TestCase,
): ):
ZIP_MODULE = 'namespacedata01' MODULE = 'namespacedata01'
class ResourceFromNamespaceZipTests(
util.ZipSetup,
ResourceFromNamespaceTests,
unittest.TestCase,
):
MODULE = 'namespacedata01'
if __name__ == '__main__': if __name__ == '__main__':


@ -6,10 +6,10 @@ import types
import pathlib import pathlib
import contextlib import contextlib
from . import data01
from ..abc import ResourceReader from ..abc import ResourceReader
from .compat.py39 import import_helper, os_helper from .compat.py39 import import_helper, os_helper
from . import zip as zip_ from . import zip as zip_
from . import _path
from importlib.machinery import ModuleSpec from importlib.machinery import ModuleSpec
@ -68,7 +68,7 @@ def create_package(file=None, path=None, is_package=True, contents=()):
) )
class CommonTests(metaclass=abc.ABCMeta): class CommonTestsBase(metaclass=abc.ABCMeta):
""" """
Tests shared by test_open, test_path, and test_read. Tests shared by test_open, test_path, and test_read.
""" """
@ -84,34 +84,34 @@ class CommonTests(metaclass=abc.ABCMeta):
""" """
Passing in the package name should succeed. Passing in the package name should succeed.
""" """
self.execute(data01.__name__, 'utf-8.file') self.execute(self.data.__name__, 'utf-8.file')
def test_package_object(self): def test_package_object(self):
""" """
Passing in the package itself should succeed. Passing in the package itself should succeed.
""" """
self.execute(data01, 'utf-8.file') self.execute(self.data, 'utf-8.file')
def test_string_path(self): def test_string_path(self):
""" """
Passing in a string for the path should succeed. Passing in a string for the path should succeed.
""" """
path = 'utf-8.file' path = 'utf-8.file'
self.execute(data01, path) self.execute(self.data, path)
def test_pathlib_path(self): def test_pathlib_path(self):
""" """
Passing in a pathlib.PurePath object for the path should succeed. Passing in a pathlib.PurePath object for the path should succeed.
""" """
path = pathlib.PurePath('utf-8.file') path = pathlib.PurePath('utf-8.file')
self.execute(data01, path) self.execute(self.data, path)
def test_importing_module_as_side_effect(self): def test_importing_module_as_side_effect(self):
""" """
The anchor package can already be imported. The anchor package can already be imported.
""" """
del sys.modules[data01.__name__] del sys.modules[self.data.__name__]
self.execute(data01.__name__, 'utf-8.file') self.execute(self.data.__name__, 'utf-8.file')
def test_missing_path(self): def test_missing_path(self):
""" """
@ -141,24 +141,66 @@ class CommonTests(metaclass=abc.ABCMeta):
self.execute(package, 'utf-8.file') self.execute(package, 'utf-8.file')
class ZipSetupBase: fixtures = dict(
ZIP_MODULE = 'data01' data01={
'__init__.py': '',
'binary.file': bytes(range(4)),
'utf-16.file': 'Hello, UTF-16 world!\n'.encode('utf-16'),
'utf-8.file': 'Hello, UTF-8 world!\n'.encode('utf-8'),
'subdirectory': {
'__init__.py': '',
'binary.file': bytes(range(4, 8)),
},
},
data02={
'__init__.py': '',
'one': {'__init__.py': '', 'resource1.txt': 'one resource'},
'two': {'__init__.py': '', 'resource2.txt': 'two resource'},
'subdirectory': {'subsubdir': {'resource.txt': 'a resource'}},
},
namespacedata01={
'binary.file': bytes(range(4)),
'utf-16.file': 'Hello, UTF-16 world!\n'.encode('utf-16'),
'utf-8.file': 'Hello, UTF-8 world!\n'.encode('utf-8'),
'subdirectory': {
'binary.file': bytes(range(12, 16)),
},
},
)
class ModuleSetup:
def setUp(self): def setUp(self):
self.fixtures = contextlib.ExitStack() self.fixtures = contextlib.ExitStack()
self.addCleanup(self.fixtures.close) self.addCleanup(self.fixtures.close)
self.fixtures.enter_context(import_helper.isolated_modules()) self.fixtures.enter_context(import_helper.isolated_modules())
self.data = self.load_fixture(self.MODULE)
def load_fixture(self, module):
self.tree_on_path({module: fixtures[module]})
return importlib.import_module(module)
class ZipSetup(ModuleSetup):
MODULE = 'data01'
def tree_on_path(self, spec):
temp_dir = self.fixtures.enter_context(os_helper.temp_dir()) temp_dir = self.fixtures.enter_context(os_helper.temp_dir())
modules = pathlib.Path(temp_dir) / 'zipped modules.zip' modules = pathlib.Path(temp_dir) / 'zipped modules.zip'
src_path = pathlib.Path(__file__).parent.joinpath(self.ZIP_MODULE)
self.fixtures.enter_context( self.fixtures.enter_context(
import_helper.DirsOnSysPath(str(zip_.make_zip_file(src_path, modules))) import_helper.DirsOnSysPath(str(zip_.make_zip_file(spec, modules)))
) )
self.data = importlib.import_module(self.ZIP_MODULE)
class DiskSetup(ModuleSetup):
MODULE = 'data01'
def tree_on_path(self, spec):
temp_dir = self.fixtures.enter_context(os_helper.temp_dir())
_path.build(spec, pathlib.Path(temp_dir))
self.fixtures.enter_context(import_helper.DirsOnSysPath(temp_dir))
class ZipSetup(ZipSetupBase): class CommonTests(DiskSetup, CommonTestsBase):
pass pass
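The fixtures mapping above replaces the on-disk data01/data02 packages: each test materializes a dict tree into a temporary directory (DiskSetup) or a zip file (ZipSetup) and then imports it. A rough standard-library-only sketch of the disk variant, with a hypothetical build_tree helper standing in for _path.build:

import importlib
import pathlib
import sys
import tempfile

def build_tree(spec, root):
    # Write a nested dict of {name: str | bytes | dict} to disk under root.
    for name, contents in spec.items():
        target = pathlib.Path(root) / name
        if isinstance(contents, dict):
            target.mkdir()
            build_tree(contents, target)
        elif isinstance(contents, bytes):
            target.write_bytes(contents)
        else:
            target.write_text(contents)

temp_dir = tempfile.mkdtemp()
build_tree({'data01': {'__init__.py': '', 'utf-8.file': 'Hello, UTF-8 world!\n'}}, temp_dir)
sys.path.insert(0, temp_dir)
data01 = importlib.import_module('data01')
print(data01.__file__)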


@@ -2,31 +2,25 @@
 Generate zip test data files.
 """

-import contextlib
-import os
-import pathlib
 import zipfile

 import zipp


-def make_zip_file(src, dst):
+def make_zip_file(tree, dst):
     """
-    Zip the files in src into a new zipfile at dst.
+    Zip the files in tree into a new zipfile at dst.
     """
     with zipfile.ZipFile(dst, 'w') as zf:
-        for src_path, rel in walk(src):
-            dst_name = src.name / pathlib.PurePosixPath(rel.as_posix())
-            zf.write(src_path, dst_name)
+        for name, contents in walk(tree):
+            zf.writestr(name, contents)
         zipp.CompleteDirs.inject(zf)
     return dst


-def walk(datapath):
-    for dirpath, dirnames, filenames in os.walk(datapath):
-        with contextlib.suppress(ValueError):
-            dirnames.remove('__pycache__')
-        for filename in filenames:
-            res = pathlib.Path(dirpath) / filename
-            rel = res.relative_to(datapath)
-            yield res, rel
+def walk(tree, prefix=''):
+    for name, contents in tree.items():
+        if isinstance(contents, dict):
+            yield from walk(contents, prefix=f'{prefix}{name}/')
+        else:
+            yield f'{prefix}{name}', contents
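The rewritten helpers take an in-memory dict tree rather than walking a source directory. A standalone usage sketch of the same idea (the zipp.CompleteDirs step from the diff is omitted here):

import io
import zipfile

def walk(tree, prefix=''):
    # Flatten a nested dict of {name: contents} into (archive_name, contents) pairs.
    for name, contents in tree.items():
        if isinstance(contents, dict):
            yield from walk(contents, prefix=f'{prefix}{name}/')
        else:
            yield f'{prefix}{name}', contents

tree = {'data01': {'__init__.py': '', 'subdirectory': {'binary.file': bytes(range(4, 8))}}}
buffer = io.BytesIO()
with zipfile.ZipFile(buffer, 'w') as zf:
    for name, contents in walk(tree):
        zf.writestr(name, contents)
with zipfile.ZipFile(buffer) as zf:
    print(zf.namelist())  # ['data01/__init__.py', 'data01/subdirectory/binary.file']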


@ -193,6 +193,7 @@ class Artist(
similar (List<:class:`~plexapi.media.Similar`>): List of similar objects. similar (List<:class:`~plexapi.media.Similar`>): List of similar objects.
styles (List<:class:`~plexapi.media.Style`>): List of style objects. styles (List<:class:`~plexapi.media.Style`>): List of style objects.
theme (str): URL to theme resource (/library/metadata/<ratingkey>/theme/<themeid>). theme (str): URL to theme resource (/library/metadata/<ratingkey>/theme/<themeid>).
ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
""" """
TAG = 'Directory' TAG = 'Directory'
TYPE = 'artist' TYPE = 'artist'
@ -213,6 +214,7 @@ class Artist(
self.similar = self.findItems(data, media.Similar) self.similar = self.findItems(data, media.Similar)
self.styles = self.findItems(data, media.Style) self.styles = self.findItems(data, media.Style)
self.theme = data.attrib.get('theme') self.theme = data.attrib.get('theme')
self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
def __iter__(self): def __iter__(self):
for album in self.albums(): for album in self.albums():
@ -281,6 +283,21 @@ class Artist(
filepaths += track.download(_savepath, keep_original_name, **kwargs) filepaths += track.download(_savepath, keep_original_name, **kwargs)
return filepaths return filepaths
def popularTracks(self):
""" Returns a list of :class:`~plexapi.audio.Track` popular tracks by the artist. """
filters = {
'album.subformat!': 'Compilation,Live',
'artist.id': self.ratingKey,
'group': 'title',
'ratingCount>>': 0,
}
return self.section().search(
libtype='track',
filters=filters,
sort='ratingCount:desc',
limit=100
)
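A possible usage sketch for the new method, assuming an existing server connection and a 'Music' library containing the named artist (server URL, token, and artist are placeholders):

from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')
artist = plex.library.section('Music').get('Radiohead')

# Up to 100 tracks sorted by rating count, excluding compilation/live albums.
for track in artist.popularTracks():
    print(track.title, track.ratingCount)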
def station(self): def station(self):
""" Returns a :class:`~plexapi.playlist.Playlist` artist radio station or `None`. """ """ Returns a :class:`~plexapi.playlist.Playlist` artist radio station or `None`. """
key = f'{self.key}?includeStations=1' key = f'{self.key}?includeStations=1'
@ -325,6 +342,7 @@ class Album(
studio (str): Studio that released the album. studio (str): Studio that released the album.
styles (List<:class:`~plexapi.media.Style`>): List of style objects. styles (List<:class:`~plexapi.media.Style`>): List of style objects.
subformats (List<:class:`~plexapi.media.Subformat`>): List of subformat objects. subformats (List<:class:`~plexapi.media.Subformat`>): List of subformat objects.
ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
viewedLeafCount (int): Number of items marked as played in the album view. viewedLeafCount (int): Number of items marked as played in the album view.
year (int): Year the album was released. year (int): Year the album was released.
""" """
@ -354,6 +372,7 @@ class Album(
self.studio = data.attrib.get('studio') self.studio = data.attrib.get('studio')
self.styles = self.findItems(data, media.Style) self.styles = self.findItems(data, media.Style)
self.subformats = self.findItems(data, media.Subformat) self.subformats = self.findItems(data, media.Subformat)
self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount')) self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount'))
self.year = utils.cast(int, data.attrib.get('year')) self.year = utils.cast(int, data.attrib.get('year'))


@@ -3,7 +3,7 @@ import re
 from typing import TYPE_CHECKING, Generic, Iterable, List, Optional, TypeVar, Union
 import weakref
 from functools import cached_property
-from urllib.parse import urlencode
+from urllib.parse import parse_qsl, urlencode, urlparse
 from xml.etree import ElementTree
 from xml.etree.ElementTree import Element
@@ -391,10 +391,9 @@ class PlexObject:
             Parameters:
                 key (string, optional): Override the key to reload.
-                **kwargs (dict): A dictionary of XML include parameters to exclude or override.
-                    All parameters are included by default with the option to override each parameter
-                    or disable each parameter individually by setting it to False or 0.
+                **kwargs (dict): A dictionary of XML include parameters to include/exclude or override.
                     See :class:`~plexapi.base.PlexPartialObject` for all the available include parameters.
+                    Set parameter to True to include and False to exclude.

             Example:

@@ -402,20 +401,28 @@ class PlexObject:
                     from plexapi.server import PlexServer
                     plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')
-                    movie = plex.library.section('Movies').get('Cars')

-                    # Partial reload of the movie without the `checkFiles` parameter.
-                    # Excluding `checkFiles` will prevent the Plex server from reading the
-                    # file to check if the file still exists and is accessible.
-                    # The movie object will remain as a partial object.
-                    movie.reload(checkFiles=False)
+                    # Search results are partial objects.
+                    movie = plex.library.section('Movies').get('Cars')
                     movie.isPartialObject()  # Returns True

-                    # Full reload of the movie with all include parameters.
+                    # Partial reload of the movie without a default include parameter.
+                    # The movie object will remain as a partial object.
+                    movie.reload(includeMarkers=False)
+                    movie.isPartialObject()  # Returns True
+
+                    # Full reload of the movie with all default include parameters.
                     # The movie object will be a full object.
                     movie.reload()
                     movie.isFullObject()  # Returns True
+
+                    # Full reload of the movie with all default and extra include parameter.
+                    # Including `checkFiles` will tell the Plex server to check if the file
+                    # still exists and is accessible.
+                    # The movie object will be a full object.
+                    movie.reload(checkFiles=True)
+                    movie.isFullObject()  # Returns True

         """
         return self._reload(key=key, **kwargs)
@ -505,25 +512,25 @@ class PlexPartialObject(PlexObject):
automatically and update itself. automatically and update itself.
""" """
    _INCLUDES = {
-        'checkFiles': 1,
-        'includeAllConcerts': 1,
+        'checkFiles': 0,
+        'includeAllConcerts': 0,
         'includeBandwidths': 1,
         'includeChapters': 1,
-        'includeChildren': 1,
-        'includeConcerts': 1,
-        'includeExternalMedia': 1,
-        'includeExtras': 1,
+        'includeChildren': 0,
+        'includeConcerts': 0,
+        'includeExternalMedia': 0,
+        'includeExtras': 0,
         'includeFields': 'thumbBlurHash,artBlurHash',
         'includeGeolocation': 1,
         'includeLoudnessRamps': 1,
         'includeMarkers': 1,
-        'includeOnDeck': 1,
-        'includePopularLeaves': 1,
-        'includePreferences': 1,
-        'includeRelated': 1,
-        'includeRelatedCount': 1,
-        'includeReviews': 1,
-        'includeStations': 1,
+        'includeOnDeck': 0,
+        'includePopularLeaves': 0,
+        'includePreferences': 0,
+        'includeRelated': 0,
+        'includeRelatedCount': 0,
+        'includeReviews': 0,
+        'includeStations': 0,
     }
_EXCLUDES = { _EXCLUDES = {
'excludeElements': ( 'excludeElements': (
@@ -592,7 +599,11 @@ class PlexPartialObject(PlexObject):
             search result for a movie often only contain a portion of the attributes a full
             object (main url) for that movie would contain.
         """
-        return not self.key or (self._details_key or self.key) == self._initpath
+        parsed_key = urlparse(self._details_key or self.key)
+        parsed_initpath = urlparse(self._initpath)
+        query_key = set(parse_qsl(parsed_key.query))
+        query_init = set(parse_qsl(parsed_initpath.query))
+        return not self.key or (parsed_key.path == parsed_initpath.path and query_key <= query_init)

     def isPartialObject(self):
         """ Returns True if this is not a full object. """


@@ -197,7 +197,7 @@ class PlexClient(PlexObject):
                 raise NotFound(message)
             else:
                 raise BadRequest(message)
-        data = response.text.encode('utf8')
+        data = utils.cleanXMLString(response.text).encode('utf8')
         return ElementTree.fromstring(data) if data.strip() else None
def sendCommand(self, command, proxy=None, **params): def sendCommand(self, command, proxy=None, **params):


@ -60,6 +60,7 @@ class Collection(
title (str): Name of the collection. title (str): Name of the collection.
titleSort (str): Title to use when sorting (defaults to title). titleSort (str): Title to use when sorting (defaults to title).
type (str): 'collection' type (str): 'collection'
ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
updatedAt (datetime): Datetime the collection was updated. updatedAt (datetime): Datetime the collection was updated.
userRating (float): Rating of the collection (0.0 - 10.0) equaling (0 stars - 5 stars). userRating (float): Rating of the collection (0.0 - 10.0) equaling (0 stars - 5 stars).
""" """
@ -102,6 +103,7 @@ class Collection(
self.title = data.attrib.get('title') self.title = data.attrib.get('title')
self.titleSort = data.attrib.get('titleSort', self.title) self.titleSort = data.attrib.get('titleSort', self.title)
self.type = data.attrib.get('type') self.type = data.attrib.get('type')
self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt')) self.updatedAt = utils.toDatetime(data.attrib.get('updatedAt'))
self.userRating = utils.cast(float, data.attrib.get('userRating')) self.userRating = utils.cast(float, data.attrib.get('userRating'))
self._items = None # cache for self.items self._items = None # cache for self.items


@@ -4,6 +4,6 @@
 # Library version
 MAJOR_VERSION = 4
 MINOR_VERSION = 15
-PATCH_VERSION = 15
+PATCH_VERSION = 16
 __short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__ = f"{__short_version__}.{PATCH_VERSION}"


@ -2823,7 +2823,8 @@ class FilteringType(PlexObject):
additionalFields.extend([ additionalFields.extend([
('duration', 'integer', 'Duration'), ('duration', 'integer', 'Duration'),
('viewOffset', 'integer', 'View Offset'), ('viewOffset', 'integer', 'View Offset'),
('label', 'tag', 'Label') ('label', 'tag', 'Label'),
('ratingCount', 'integer', 'Rating Count'),
]) ])
elif self.type == 'collection': elif self.type == 'collection':
additionalFields.extend([ additionalFields.extend([


@ -106,12 +106,16 @@ class MediaPart(PlexObject):
Attributes: Attributes:
TAG (str): 'Part' TAG (str): 'Part'
accessible (bool): True if the file is accessible. accessible (bool): True if the file is accessible.
Requires reloading the media with ``checkFiles=True``.
Refer to :func:`~plexapi.base.PlexObject.reload`.
audioProfile (str): The audio profile of the file. audioProfile (str): The audio profile of the file.
container (str): The container type of the file (ex: avi). container (str): The container type of the file (ex: avi).
decision (str): Unknown. decision (str): Unknown.
deepAnalysisVersion (int): The Plex deep analysis version for the file. deepAnalysisVersion (int): The Plex deep analysis version for the file.
duration (int): The duration of the file in milliseconds. duration (int): The duration of the file in milliseconds.
exists (bool): True if the file exists. exists (bool): True if the file exists.
Requires reloading the media with ``checkFiles=True``.
Refer to :func:`~plexapi.base.PlexObject.reload`.
file (str): The path to this file on disk (ex: /media/Movies/Cars (2006)/Cars (2006).mkv) file (str): The path to this file on disk (ex: /media/Movies/Cars (2006)/Cars (2006).mkv)
has64bitOffsets (bool): True if the file has 64 bit offsets. has64bitOffsets (bool): True if the file has 64 bit offsets.
hasThumbnail (bool): True if the file (track) has an embedded thumbnail. hasThumbnail (bool): True if the file (track) has an embedded thumbnail.
@ -999,6 +1003,28 @@ class Review(PlexObject):
self.text = data.attrib.get('text') self.text = data.attrib.get('text')
@utils.registerPlexObject
class UltraBlurColors(PlexObject):
""" Represents a single UltraBlurColors media tag.
Attributes:
TAG (str): 'UltraBlurColors'
bottomLeft (str): The bottom left hex color.
bottomRight (str): The bottom right hex color.
topLeft (str): The top left hex color.
topRight (str): The top right hex color.
"""
TAG = 'UltraBlurColors'
def _loadData(self, data):
""" Load attribute values from Plex XML response. """
self._data = data
self.bottomLeft = data.attrib.get('bottomLeft')
self.bottomRight = data.attrib.get('bottomRight')
self.topLeft = data.attrib.get('topLeft')
self.topRight = data.attrib.get('topRight')
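A short, hypothetical access example for the new tag; the server, token, and title are placeholders, and the attribute is None when the server does not return the element:

from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')
movie = plex.library.section('Movies').get('Cars')

if movie.ultraBlurColors:
    print(movie.ultraBlurColors.topLeft, movie.ultraBlurColors.bottomRight)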
class BaseResource(PlexObject): class BaseResource(PlexObject):
""" Base class for all Art, Poster, and Theme objects. """ Base class for all Art, Poster, and Theme objects.


@@ -14,8 +14,8 @@ class AdvancedSettingsMixin:
     def preferences(self):
         """ Returns a list of :class:`~plexapi.settings.Preferences` objects. """
-        data = self._server.query(self._details_key)
-        return self.findItems(data, settings.Preferences, rtag='Preferences')
+        key = f'{self.key}?includePreferences=1'
+        return self.fetchItems(key, cls=settings.Preferences, rtag='Preferences')

     def preference(self, pref):
         """ Returns a :class:`~plexapi.settings.Preferences` object for the specified pref.
@@ -240,8 +240,7 @@ class UnmatchMatchMixin:
             params['agent'] = utils.getAgentIdentifier(self.section(), agent)
             key = key + '?' + urlencode(params)

-        data = self._server.query(key, method=self._server._session.get)
-        return self.findItems(data, initpath=key)
+        return self.fetchItems(key, cls=media.SearchResult)

     def fixMatch(self, searchResult=None, auto=False, agent=None):
         """ Use match result to update show metadata.
@@ -278,8 +277,8 @@ class ExtrasMixin:
     def extras(self):
         """ Returns a list of :class:`~plexapi.video.Extra` objects. """
         from plexapi.video import Extra
-        data = self._server.query(self._details_key)
-        return self.findItems(data, Extra, rtag='Extras')
+        key = f'{self.key}/extras'
+        return self.fetchItems(key, cls=Extra)


 class HubsMixin:
@@ -289,8 +288,7 @@ class HubsMixin:
         """ Returns a list of :class:`~plexapi.library.Hub` objects. """
         from plexapi.library import Hub
         key = f'{self.key}/related'
-        data = self._server.query(key)
-        return self.findItems(data, Hub)
+        return self.fetchItems(key, cls=Hub)
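These mixins now fetch only the endpoint they need instead of re-querying the full details key; calling code is unchanged. A hedged usage sketch with placeholder server and titles:

from plexapi.server import PlexServer

plex = PlexServer('http://localhost:32400', token='xxxxxxxxxxxxxxxxxxxx')
show = plex.library.section('TV Shows').get('The Office (US)')

print([extra.title for extra in show.extras()])  # requests {key}/extras only
print([hub.title for hub in show.hubs()])        # requests {key}/related only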
class PlayedUnplayedMixin: class PlayedUnplayedMixin:


@ -250,7 +250,7 @@ class MyPlexAccount(PlexObject):
return response.json() return response.json()
elif 'text/plain' in response.headers.get('Content-Type', ''): elif 'text/plain' in response.headers.get('Content-Type', ''):
return response.text.strip() return response.text.strip()
data = response.text.encode('utf8') data = utils.cleanXMLString(response.text).encode('utf8')
return ElementTree.fromstring(data) if data.strip() else None return ElementTree.fromstring(data) if data.strip() else None
def ping(self): def ping(self):


@ -768,7 +768,7 @@ class PlexServer(PlexObject):
raise NotFound(message) raise NotFound(message)
else: else:
raise BadRequest(message) raise BadRequest(message)
data = response.text.encode('utf8') data = utils.cleanXMLString(response.text).encode('utf8')
return ElementTree.fromstring(data) if data.strip() else None return ElementTree.fromstring(data) if data.strip() else None
def search(self, query, mediatype=None, limit=None, sectionId=None): def search(self, query, mediatype=None, limit=None, sectionId=None):


@ -6,6 +6,7 @@ import logging
import os import os
import re import re
import string import string
import sys
import time import time
import unicodedata import unicodedata
import warnings import warnings
@ -673,3 +674,45 @@ def openOrRead(file):
def sha1hash(guid): def sha1hash(guid):
""" Return the SHA1 hash of a guid. """ """ Return the SHA1 hash of a guid. """
return sha1(guid.encode('utf-8')).hexdigest() return sha1(guid.encode('utf-8')).hexdigest()
# https://stackoverflow.com/a/64570125
_illegal_XML_characters = [
(0x00, 0x08),
(0x0B, 0x0C),
(0x0E, 0x1F),
(0x7F, 0x84),
(0x86, 0x9F),
(0xFDD0, 0xFDDF),
(0xFFFE, 0xFFFF),
]
if sys.maxunicode >= 0x10000: # not narrow build
_illegal_XML_characters.extend(
[
(0x1FFFE, 0x1FFFF),
(0x2FFFE, 0x2FFFF),
(0x3FFFE, 0x3FFFF),
(0x4FFFE, 0x4FFFF),
(0x5FFFE, 0x5FFFF),
(0x6FFFE, 0x6FFFF),
(0x7FFFE, 0x7FFFF),
(0x8FFFE, 0x8FFFF),
(0x9FFFE, 0x9FFFF),
(0xAFFFE, 0xAFFFF),
(0xBFFFE, 0xBFFFF),
(0xCFFFE, 0xCFFFF),
(0xDFFFE, 0xDFFFF),
(0xEFFFE, 0xEFFFF),
(0xFFFFE, 0xFFFFF),
(0x10FFFE, 0x10FFFF),
]
)
_illegal_XML_ranges = [
fr'{chr(low)}-{chr(high)}'
for (low, high) in _illegal_XML_characters
]
_illegal_XML_re = re.compile(fr'[{"".join(_illegal_XML_ranges)}]')
def cleanXMLString(s):
return _illegal_XML_re.sub('', s)
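A quick standalone check of the sanitizer's intent, using a reduced pattern that covers only the low control-character ranges (the full table above also strips the non-character code points):

import re

# Reduced version of the idea: drop characters that are illegal in XML 1.0.
_illegal = re.compile('[\x00-\x08\x0b\x0c\x0e-\x1f\x7f-\x84\x86-\x9f]')

def clean_xml_string(s):
    return _illegal.sub('', s)

print(clean_xml_string('Song Title \x00with\x08 stray control bytes'))
# Song Title with stray control bytes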


@ -375,6 +375,7 @@ class Movie(
studio (str): Studio that created movie (Di Bonaventura Pictures; 21 Laps Entertainment). studio (str): Studio that created movie (Di Bonaventura Pictures; 21 Laps Entertainment).
tagline (str): Movie tag line (Back 2 Work; Who says men can't change?). tagline (str): Movie tag line (Back 2 Work; Who says men can't change?).
theme (str): URL to theme resource (/library/metadata/<ratingkey>/theme/<themeid>). theme (str): URL to theme resource (/library/metadata/<ratingkey>/theme/<themeid>).
ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
useOriginalTitle (int): Setting that indicates if the original title is used for the movie useOriginalTitle (int): Setting that indicates if the original title is used for the movie
(-1 = Library default, 0 = No, 1 = Yes). (-1 = Library default, 0 = No, 1 = Yes).
viewOffset (int): View offset in milliseconds. viewOffset (int): View offset in milliseconds.
@ -420,6 +421,7 @@ class Movie(
self.studio = data.attrib.get('studio') self.studio = data.attrib.get('studio')
self.tagline = data.attrib.get('tagline') self.tagline = data.attrib.get('tagline')
self.theme = data.attrib.get('theme') self.theme = data.attrib.get('theme')
self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
self.useOriginalTitle = utils.cast(int, data.attrib.get('useOriginalTitle', '-1')) self.useOriginalTitle = utils.cast(int, data.attrib.get('useOriginalTitle', '-1'))
self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0)) self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0))
self.writers = self.findItems(data, media.Writer) self.writers = self.findItems(data, media.Writer)
@ -456,8 +458,8 @@ class Movie(
def reviews(self): def reviews(self):
""" Returns a list of :class:`~plexapi.media.Review` objects. """ """ Returns a list of :class:`~plexapi.media.Review` objects. """
data = self._server.query(self._details_key) key = f'{self.key}?includeReviews=1'
return self.findItems(data, media.Review, rtag='Video') return self.fetchItems(key, cls=media.Review, rtag='Video')
def editions(self): def editions(self):
""" Returns a list of :class:`~plexapi.video.Movie` objects """ Returns a list of :class:`~plexapi.video.Movie` objects
@ -543,6 +545,7 @@ class Show(
(-1 = Account default, 0 = Manually selected, 1 = Shown with foreign audio, 2 = Always enabled). (-1 = Account default, 0 = Manually selected, 1 = Shown with foreign audio, 2 = Always enabled).
tagline (str): Show tag line. tagline (str): Show tag line.
theme (str): URL to theme resource (/library/metadata/<ratingkey>/theme/<themeid>). theme (str): URL to theme resource (/library/metadata/<ratingkey>/theme/<themeid>).
ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
useOriginalTitle (int): Setting that indicates if the original title is used for the show useOriginalTitle (int): Setting that indicates if the original title is used for the show
(-1 = Library default, 0 = No, 1 = Yes). (-1 = Library default, 0 = No, 1 = Yes).
viewedLeafCount (int): Number of items marked as played in the show view. viewedLeafCount (int): Number of items marked as played in the show view.
@ -592,6 +595,7 @@ class Show(
self.subtitleMode = utils.cast(int, data.attrib.get('subtitleMode', '-1')) self.subtitleMode = utils.cast(int, data.attrib.get('subtitleMode', '-1'))
self.tagline = data.attrib.get('tagline') self.tagline = data.attrib.get('tagline')
self.theme = data.attrib.get('theme') self.theme = data.attrib.get('theme')
self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
self.useOriginalTitle = utils.cast(int, data.attrib.get('useOriginalTitle', '-1')) self.useOriginalTitle = utils.cast(int, data.attrib.get('useOriginalTitle', '-1'))
self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount')) self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount'))
self.year = utils.cast(int, data.attrib.get('year')) self.year = utils.cast(int, data.attrib.get('year'))
@ -614,8 +618,8 @@ class Show(
""" Returns show's On Deck :class:`~plexapi.video.Video` object or `None`. """ Returns show's On Deck :class:`~plexapi.video.Video` object or `None`.
If show is unwatched, return will likely be the first episode. If show is unwatched, return will likely be the first episode.
""" """
data = self._server.query(self._details_key) key = f'{self.key}?includeOnDeck=1'
return next(iter(self.findItems(data, rtag='OnDeck')), None) return next(iter(self.fetchItems(key, cls=Episode, rtag='OnDeck')), None)
def season(self, title=None, season=None): def season(self, title=None, season=None):
""" Returns the season with the specified title or number. """ Returns the season with the specified title or number.
@ -735,6 +739,7 @@ class Season(
subtitleLanguage (str): Setting that indicates the preferred subtitle language. subtitleLanguage (str): Setting that indicates the preferred subtitle language.
subtitleMode (int): Setting that indicates the auto-select subtitle mode. subtitleMode (int): Setting that indicates the auto-select subtitle mode.
(-1 = Series default, 0 = Manually selected, 1 = Shown with foreign audio, 2 = Always enabled). (-1 = Series default, 0 = Manually selected, 1 = Shown with foreign audio, 2 = Always enabled).
ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
viewedLeafCount (int): Number of items marked as played in the season view. viewedLeafCount (int): Number of items marked as played in the season view.
year (int): Year the season was released. year (int): Year the season was released.
""" """
@ -766,6 +771,7 @@ class Season(
self.ratings = self.findItems(data, media.Rating) self.ratings = self.findItems(data, media.Rating)
self.subtitleLanguage = data.attrib.get('subtitleLanguage', '') self.subtitleLanguage = data.attrib.get('subtitleLanguage', '')
self.subtitleMode = utils.cast(int, data.attrib.get('subtitleMode', '-1')) self.subtitleMode = utils.cast(int, data.attrib.get('subtitleMode', '-1'))
self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount')) self.viewedLeafCount = utils.cast(int, data.attrib.get('viewedLeafCount'))
self.year = utils.cast(int, data.attrib.get('year')) self.year = utils.cast(int, data.attrib.get('year'))
@ -796,8 +802,8 @@ class Season(
""" Returns season's On Deck :class:`~plexapi.video.Video` object or `None`. """ Returns season's On Deck :class:`~plexapi.video.Video` object or `None`.
Will only return a match if the show's On Deck episode is in this season. Will only return a match if the show's On Deck episode is in this season.
""" """
data = self._server.query(self._details_key) key = f'{self.key}?includeOnDeck=1'
return next(iter(self.findItems(data, rtag='OnDeck')), None) return next(iter(self.fetchItems(key, cls=Episode, rtag='OnDeck')), None)
def episode(self, title=None, episode=None): def episode(self, title=None, episode=None):
""" Returns the episode with the given title or number. """ Returns the episode with the given title or number.
@ -914,6 +920,7 @@ class Episode(
skipParent (bool): True if the show's seasons are set to hidden. skipParent (bool): True if the show's seasons are set to hidden.
sourceURI (str): Remote server URI (server://<machineIdentifier>/com.plexapp.plugins.library) sourceURI (str): Remote server URI (server://<machineIdentifier>/com.plexapp.plugins.library)
(remote playlist item only). (remote playlist item only).
ultraBlurColors (:class:`~plexapi.media.UltraBlurColors`): Ultra blur color object.
viewOffset (int): View offset in milliseconds. viewOffset (int): View offset in milliseconds.
writers (List<:class:`~plexapi.media.Writer`>): List of writers objects. writers (List<:class:`~plexapi.media.Writer`>): List of writers objects.
year (int): Year the episode was released. year (int): Year the episode was released.
@ -958,6 +965,7 @@ class Episode(
self.roles = self.findItems(data, media.Role) self.roles = self.findItems(data, media.Role)
self.skipParent = utils.cast(bool, data.attrib.get('skipParent', '0')) self.skipParent = utils.cast(bool, data.attrib.get('skipParent', '0'))
self.sourceURI = data.attrib.get('source') # remote playlist item self.sourceURI = data.attrib.get('source') # remote playlist item
self.ultraBlurColors = self.findItem(data, media.UltraBlurColors)
self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0)) self.viewOffset = utils.cast(int, data.attrib.get('viewOffset', 0))
self.writers = self.findItems(data, media.Writer) self.writers = self.findItems(data, media.Writer)
self.year = utils.cast(int, data.attrib.get('year')) self.year = utils.cast(int, data.attrib.get('year'))


@@ -120,8 +120,8 @@ class version_info(NamedTuple):
         return f"{__name__}.{type(self).__name__}({', '.join('{}={!r}'.format(*nv) for nv in zip(self._fields, self))})"

-__version_info__ = version_info(3, 1, 2, "final", 1)
-__version_time__ = "06 Mar 2024 07:08 UTC"
+__version_info__ = version_info(3, 1, 4, "final", 1)
+__version_time__ = "25 Aug 2024 14:40 UTC"
 __version__ = __version_info__.__version__
 __versionTime__ = __version_time__
 __author__ = "Paul McGuire <ptmcg.gm+pyparsing@gmail.com>"
@ -143,7 +143,7 @@ from .common import (
_builtin_exprs as common_builtin_exprs, _builtin_exprs as common_builtin_exprs,
) )
# define backward compat synonyms # Compatibility synonyms
if "pyparsing_unicode" not in globals(): if "pyparsing_unicode" not in globals():
pyparsing_unicode = unicode # type: ignore[misc] pyparsing_unicode = unicode # type: ignore[misc]
if "pyparsing_common" not in globals(): if "pyparsing_common" not in globals():


@ -196,7 +196,7 @@ def with_class(classname, namespace=""):
return with_attribute(**{classattr: classname}) return with_attribute(**{classattr: classname})
# pre-PEP8 compatibility symbols # Compatibility synonyms
# fmt: off # fmt: off
replaceWith = replaced_by_pep8("replaceWith", replace_with) replaceWith = replaced_by_pep8("replaceWith", replace_with)
removeQuotes = replaced_by_pep8("removeQuotes", remove_quotes) removeQuotes = replaced_by_pep8("removeQuotes", remove_quotes)


@@ -418,20 +418,15 @@ class pyparsing_common:
     # fmt: on

     # pre-PEP8 compatibility names
-    convertToInteger = convert_to_integer
-    """Deprecated - use :class:`convert_to_integer`"""
-    convertToFloat = convert_to_float
-    """Deprecated - use :class:`convert_to_float`"""
-    convertToDate = convert_to_date
-    """Deprecated - use :class:`convert_to_date`"""
-    convertToDatetime = convert_to_datetime
-    """Deprecated - use :class:`convert_to_datetime`"""
-    stripHTMLTags = strip_html_tags
-    """Deprecated - use :class:`strip_html_tags`"""
-    upcaseTokens = upcase_tokens
-    """Deprecated - use :class:`upcase_tokens`"""
-    downcaseTokens = downcase_tokens
-    """Deprecated - use :class:`downcase_tokens`"""
+    # fmt: off
+    convertToInteger = staticmethod(replaced_by_pep8("convertToInteger", convert_to_integer))
+    convertToFloat = staticmethod(replaced_by_pep8("convertToFloat", convert_to_float))
+    convertToDate = staticmethod(replaced_by_pep8("convertToDate", convert_to_date))
+    convertToDatetime = staticmethod(replaced_by_pep8("convertToDatetime", convert_to_datetime))
+    stripHTMLTags = staticmethod(replaced_by_pep8("stripHTMLTags", strip_html_tags))
+    upcaseTokens = staticmethod(replaced_by_pep8("upcaseTokens", upcase_tokens))
+    downcaseTokens = staticmethod(replaced_by_pep8("downcaseTokens", downcase_tokens))
+    # fmt: on
_builtin_exprs = [ _builtin_exprs = [

File diff suppressed because it is too large.


@ -36,6 +36,7 @@ jinja2_template_source = """\
</head> </head>
<body> <body>
{% endif %} {% endif %}
<meta charset="UTF-8"/>
{{ body | safe }} {{ body | safe }}
{% for diagram in diagrams %} {% for diagram in diagrams %}
<div class="railroad-group"> <div class="railroad-group">
@ -89,7 +90,7 @@ class AnnotatedItem(railroad.Group):
""" """
def __init__(self, label: str, item): def __init__(self, label: str, item):
super().__init__(item=item, label="[{}]".format(label) if label else label) super().__init__(item=item, label=f"[{label}]")
class EditablePartial(Generic[T]): class EditablePartial(Generic[T]):
@ -145,7 +146,7 @@ def railroad_to_html(diagrams: List[NamedDiagram], embed=False, **kwargs) -> str
continue continue
io = StringIO() io = StringIO()
try: try:
css = kwargs.get('css') css = kwargs.get("css")
diagram.diagram.writeStandalone(io.write, css=css) diagram.diagram.writeStandalone(io.write, css=css)
except AttributeError: except AttributeError:
diagram.diagram.writeSvg(io.write) diagram.diagram.writeSvg(io.write)
@ -425,9 +426,11 @@ def _apply_diagram_item_enhancements(fn):
element_results_name = element.resultsName element_results_name = element.resultsName
if element_results_name: if element_results_name:
# add "*" to indicate if this is a "list all results" name # add "*" to indicate if this is a "list all results" name
element_results_name += "" if element.modalResults else "*" modal_tag = "" if element.modalResults else "*"
ret = EditablePartial.from_call( ret = EditablePartial.from_call(
railroad.Group, item=ret, label=element_results_name railroad.Group,
item=ret,
label=f"{repr(element_results_name)}{modal_tag}",
) )
return ret return ret
@ -534,7 +537,7 @@ def _to_diagram_element(
# (all will have the same name, and resultsName) # (all will have the same name, and resultsName)
if not exprs: if not exprs:
return None return None
if len(set((e.name, e.resultsName) for e in exprs)) == 1: if len(set((e.name, e.resultsName) for e in exprs)) == 1 and len(exprs) > 2:
ret = EditablePartial.from_call( ret = EditablePartial.from_call(
railroad.OneOrMore, item="", repeat=str(len(exprs)) railroad.OneOrMore, item="", repeat=str(len(exprs))
) )
@ -563,7 +566,7 @@ def _to_diagram_element(
if show_groups: if show_groups:
ret = EditablePartial.from_call(AnnotatedItem, label="", item="") ret = EditablePartial.from_call(AnnotatedItem, label="", item="")
else: else:
ret = EditablePartial.from_call(railroad.Group, label="", item="") ret = EditablePartial.from_call(railroad.Sequence, items=[])
elif isinstance(element, pyparsing.TokenConverter): elif isinstance(element, pyparsing.TokenConverter):
label = type(element).__name__.lower() label = type(element).__name__.lower()
if label == "tokenconverter": if label == "tokenconverter":
@ -573,8 +576,36 @@ def _to_diagram_element(
elif isinstance(element, pyparsing.Opt): elif isinstance(element, pyparsing.Opt):
ret = EditablePartial.from_call(railroad.Optional, item="") ret = EditablePartial.from_call(railroad.Optional, item="")
elif isinstance(element, pyparsing.OneOrMore): elif isinstance(element, pyparsing.OneOrMore):
ret = EditablePartial.from_call(railroad.OneOrMore, item="") if element.not_ender is not None:
args = [
parent,
lookup,
vertical,
index,
name_hint,
show_results_names,
show_groups,
]
return _to_diagram_element(
(~element.not_ender.expr + element.expr)[1, ...].set_name(element.name),
*args,
)
ret = EditablePartial.from_call(railroad.OneOrMore, item=None)
elif isinstance(element, pyparsing.ZeroOrMore): elif isinstance(element, pyparsing.ZeroOrMore):
if element.not_ender is not None:
args = [
parent,
lookup,
vertical,
index,
name_hint,
show_results_names,
show_groups,
]
return _to_diagram_element(
(~element.not_ender.expr + element.expr)[...].set_name(element.name),
*args,
)
ret = EditablePartial.from_call(railroad.ZeroOrMore, item="") ret = EditablePartial.from_call(railroad.ZeroOrMore, item="")
elif isinstance(element, pyparsing.Group): elif isinstance(element, pyparsing.Group):
ret = EditablePartial.from_call( ret = EditablePartial.from_call(


@ -85,7 +85,7 @@ class ParseBaseException(Exception):
ret = [] ret = []
if isinstance(exc, ParseBaseException): if isinstance(exc, ParseBaseException):
ret.append(exc.line) ret.append(exc.line)
ret.append(" " * (exc.column - 1) + "^") ret.append(f"{' ' * (exc.column - 1)}^")
ret.append(f"{type(exc).__name__}: {exc}") ret.append(f"{type(exc).__name__}: {exc}")
if depth <= 0: if depth <= 0:
@ -245,6 +245,7 @@ class ParseBaseException(Exception):
""" """
return self.explain_exception(self, depth) return self.explain_exception(self, depth)
# Compatibility synonyms
# fmt: off # fmt: off
markInputline = replaced_by_pep8("markInputline", mark_input_line) markInputline = replaced_by_pep8("markInputline", mark_input_line)
# fmt: on # fmt: on


@@ -782,9 +782,12 @@ def infix_notation(
     # if lpar and rpar are not suppressed, wrap in group
     if not (isinstance(lpar, Suppress) and isinstance(rpar, Suppress)):
-        lastExpr = base_expr | Group(lpar + ret + rpar)
+        lastExpr = base_expr | Group(lpar + ret + rpar).set_name(
+            f"nested_{base_expr.name}"
+        )
     else:
-        lastExpr = base_expr | (lpar + ret + rpar)
+        lastExpr = base_expr | (lpar + ret + rpar).set_name(f"nested_{base_expr.name}")
+    root_expr = lastExpr

     arity: int
     rightLeftAssoc: opAssoc
@@ -855,6 +858,7 @@ def infix_notation(
         thisExpr <<= (matchExpr | lastExpr).setName(term_name)
         lastExpr = thisExpr
     ret <<= lastExpr
+    root_expr.set_name("base_expr")
     return ret
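For reference, a minimal infix_notation grammar; the added set_name calls mainly affect generated railroad diagrams and error messages, not the parsed output (the grammar below is illustrative):

import pyparsing as pp

operand = pp.pyparsing_common.integer
arith = pp.infix_notation(operand, [
    ("*", 2, pp.OpAssoc.LEFT),
    ("+", 2, pp.OpAssoc.LEFT),
])
print(arith.parse_string("1 + 2 * 3").as_list())  # [[1, '+', [2, '*', 3]]]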
@ -1049,7 +1053,7 @@ def delimited_list(
) )
# pre-PEP8 compatible names # Compatibility synonyms
# fmt: off # fmt: off
opAssoc = OpAssoc opAssoc = OpAssoc
anyOpenTag = any_open_tag anyOpenTag = any_open_tag


@ -4,12 +4,14 @@ from collections.abc import (
Mapping, Mapping,
MutableSequence, MutableSequence,
Iterator, Iterator,
Sequence, Iterable,
Container,
) )
import pprint import pprint
from typing import Tuple, Any, Dict, Set, List from typing import Tuple, Any, Dict, Set, List
from .util import replaced_by_pep8
str_type: Tuple[type, ...] = (str, bytes) str_type: Tuple[type, ...] = (str, bytes)
_generator_type = type((_ for _ in ())) _generator_type = type((_ for _ in ()))
@@ -573,20 +575,20 @@ class ParseResults:
         # replace values with copies if they are of known mutable types
         for i, obj in enumerate(self._toklist):
             if isinstance(obj, ParseResults):
-                self._toklist[i] = obj.deepcopy()
+                ret._toklist[i] = obj.deepcopy()
             elif isinstance(obj, (str, bytes)):
                 pass
             elif isinstance(obj, MutableMapping):
-                self._toklist[i] = dest = type(obj)()
+                ret._toklist[i] = dest = type(obj)()
                 for k, v in obj.items():
                     dest[k] = v.deepcopy() if isinstance(v, ParseResults) else v
-            elif isinstance(obj, Container):
-                self._toklist[i] = type(obj)(
+            elif isinstance(obj, Iterable):
+                ret._toklist[i] = type(obj)(
                     v.deepcopy() if isinstance(v, ParseResults) else v for v in obj
                 )
         return ret
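The fix writes the copies into ret instead of back into self, so taking a deep copy no longer rewrites the original's token list. A short illustration of the intended contract (the expression is invented for the demo):

import pyparsing as pp

pair = pp.Group(pp.Word(pp.alphas) + pp.Word(pp.nums))
result = pair.parse_string("answer 42")
copy = result.deepcopy()

copy[0][1] = "99"
print(result[0].as_list())  # ['answer', '42'] -- original unchanged
print(copy[0].as_list())    # ['answer', '99']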
def get_name(self): def get_name(self) -> str:
r""" r"""
Returns the results name for this token expression. Useful when several Returns the results name for this token expression. Useful when several
different expressions might match at a particular location. different expressions might match at a particular location.


@ -53,51 +53,51 @@ class unicode_set:
_ranges: UnicodeRangeList = [] _ranges: UnicodeRangeList = []
@_lazyclassproperty @_lazyclassproperty
def _chars_for_ranges(cls): def _chars_for_ranges(cls) -> List[str]:
ret = [] ret: List[int] = []
for cc in cls.__mro__: for cc in cls.__mro__:
if cc is unicode_set: if cc is unicode_set:
break break
for rr in getattr(cc, "_ranges", ()): for rr in getattr(cc, "_ranges", ()):
ret.extend(range(rr[0], rr[-1] + 1)) ret.extend(range(rr[0], rr[-1] + 1))
return [chr(c) for c in sorted(set(ret))] return sorted(chr(c) for c in set(ret))
@_lazyclassproperty @_lazyclassproperty
def printables(cls): def printables(cls) -> str:
"""all non-whitespace characters in this range""" """all non-whitespace characters in this range"""
return "".join(filterfalse(str.isspace, cls._chars_for_ranges)) return "".join(filterfalse(str.isspace, cls._chars_for_ranges))
@_lazyclassproperty @_lazyclassproperty
def alphas(cls): def alphas(cls) -> str:
"""all alphabetic characters in this range""" """all alphabetic characters in this range"""
return "".join(filter(str.isalpha, cls._chars_for_ranges)) return "".join(filter(str.isalpha, cls._chars_for_ranges))
@_lazyclassproperty @_lazyclassproperty
def nums(cls): def nums(cls) -> str:
"""all numeric digit characters in this range""" """all numeric digit characters in this range"""
return "".join(filter(str.isdigit, cls._chars_for_ranges)) return "".join(filter(str.isdigit, cls._chars_for_ranges))
@_lazyclassproperty @_lazyclassproperty
def alphanums(cls): def alphanums(cls) -> str:
"""all alphanumeric characters in this range""" """all alphanumeric characters in this range"""
return cls.alphas + cls.nums return cls.alphas + cls.nums
@_lazyclassproperty @_lazyclassproperty
def identchars(cls): def identchars(cls) -> str:
"""all characters in this range that are valid identifier characters, plus underscore '_'""" """all characters in this range that are valid identifier characters, plus underscore '_'"""
return "".join( return "".join(
sorted( sorted(
set( set(filter(str.isidentifier, cls._chars_for_ranges))
"".join(filter(str.isidentifier, cls._chars_for_ranges)) | set(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµº" "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµº"
+ "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ" "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ"
+ "_" "_"
) )
) )
) )
@_lazyclassproperty @_lazyclassproperty
def identbodychars(cls): def identbodychars(cls) -> str:
""" """
all characters in this range that are valid identifier body characters, all characters in this range that are valid identifier body characters,
plus the digits 0-9, and · (Unicode MIDDLE DOT) plus the digits 0-9, and · (Unicode MIDDLE DOT)
@ -105,7 +105,9 @@ class unicode_set:
identifier_chars = set( identifier_chars = set(
c for c in cls._chars_for_ranges if ("_" + c).isidentifier() c for c in cls._chars_for_ranges if ("_" + c).isidentifier()
) )
return "".join(sorted(identifier_chars | set(cls.identchars + "0123456789·"))) return "".join(
sorted(identifier_chars | set(cls.identchars) | set("0123456789·"))
)
@_lazyclassproperty @_lazyclassproperty
def identifier(cls): def identifier(cls):


@@ -246,7 +246,7 @@ def replaced_by_pep8(compat_name: str, fn: C) -> C:
    # (Presence of 'self' arg in signature is used by explain_exception() methods, so we take
    # some extra steps to add it if present in decorated function.)
-    if "self" == list(inspect.signature(fn).parameters)[0]:
+    if ["self"] == list(inspect.signature(fn).parameters)[:1]:

        @wraps(fn)
        def _inner(self, *args, **kwargs):
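The slice form matters for callables with no parameters at all: indexing [0] raises IndexError on an empty parameter list, while [:1] simply yields an empty list. A standalone check (the functions are invented for the demo):

import inspect

def no_args():
    return 42

def method_like(self, x):
    return x

params = list(inspect.signature(no_args).parameters)
print(params[:1] == ["self"])   # False, and no IndexError on the empty list
print(list(inspect.signature(method_like).parameters)[:1] == ["self"])  # True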


@@ -118,7 +118,7 @@ Serializing multiple objects to JSON lines (newline-delimited JSON)::
 """
 from __future__ import absolute_import
-__version__ = '3.19.2'
+__version__ = '3.19.3'
 __all__ = [
     'dump', 'dumps', 'load', 'loads',
     'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',


@@ -1,7 +1,7 @@
 apscheduler==3.10.1
 cryptography==43.0.0
 importlib-metadata==8.5.0
-importlib-resources==6.4.0
+importlib-resources==6.4.5
 pyinstaller==6.8.0
 pyopenssl==24.2.1


@@ -27,11 +27,10 @@ from plexpy import users

 class ActivityProcessor(object):

-    def __init__(self):
-        self.db = database.MonitorDatabase()
-
     def write_session(self, session=None, notify=True):
         if session:
+            db = database.MonitorDatabase()
             values = {'session_key': session.get('session_key', ''),
                       'session_id': session.get('session_id', ''),
                       'transcode_key': session.get('transcode_key', ''),
@ -149,7 +148,7 @@ class ActivityProcessor(object):
keys = {'session_key': session.get('session_key', ''), keys = {'session_key': session.get('session_key', ''),
'rating_key': session.get('rating_key', '')} 'rating_key': session.get('rating_key', '')}
result = self.db.upsert('sessions', values, keys) result = db.upsert('sessions', values, keys)
if result == 'insert': if result == 'insert':
# If it's our first write then time stamp it. # If it's our first write then time stamp it.
@ -159,7 +158,7 @@ class ActivityProcessor(object):
media_type=values['media_type'], media_type=values['media_type'],
started=started) started=started)
timestamp = {'started': started, 'initial_stream': initial_stream} timestamp = {'started': started, 'initial_stream': initial_stream}
self.db.upsert('sessions', timestamp, keys) db.upsert('sessions', timestamp, keys)
# Check if any notification agents have notifications enabled # Check if any notification agents have notifications enabled
if notify: if notify:
@ -260,6 +259,8 @@ class ActivityProcessor(object):
logger.debug("Tautulli ActivityProcessor :: History logging for library '%s' is disabled." % library_details['section_name']) logger.debug("Tautulli ActivityProcessor :: History logging for library '%s' is disabled." % library_details['section_name'])
if logging_enabled: if logging_enabled:
db = database.MonitorDatabase()
media_info = {} media_info = {}
# Fetch metadata first so we can return false if it fails # Fetch metadata first so we can return false if it fails
@ -316,10 +317,10 @@ class ActivityProcessor(object):
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history transaction..." # logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history transaction..."
# % session['session_key']) # % session['session_key'])
self.db.upsert(table_name='session_history', key_dict=keys, value_dict=values) db.upsert(table_name='session_history', key_dict=keys, value_dict=values)
# Get the last insert row id # Get the last insert row id
last_id = self.db.last_insert_id() last_id = db.last_insert_id()
self.group_history(last_id, session, metadata) self.group_history(last_id, session, metadata)
# logger.debug("Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s" # logger.debug("Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s"
@ -410,7 +411,7 @@ class ActivityProcessor(object):
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_media_info transaction..." # logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_media_info transaction..."
# % session['session_key']) # % session['session_key'])
self.db.upsert(table_name='session_history_media_info', key_dict=keys, value_dict=values) db.upsert(table_name='session_history_media_info', key_dict=keys, value_dict=values)
# Write the session_history_metadata table # Write the session_history_metadata table
directors = ";".join(metadata['directors']) directors = ";".join(metadata['directors'])
@ -475,7 +476,7 @@ class ActivityProcessor(object):
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..." # logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
# % session['session_key']) # % session['session_key'])
self.db.upsert(table_name='session_history_metadata', key_dict=keys, value_dict=values) db.upsert(table_name='session_history_metadata', key_dict=keys, value_dict=values)
# Return the session row id when the session is successfully written to the database # Return the session row id when the session is successfully written to the database
return session['id'] return session['id']
@ -484,6 +485,8 @@ class ActivityProcessor(object):
new_session = prev_session = None new_session = prev_session = None
prev_watched = None prev_watched = None
db = database.MonitorDatabase()
if session['live']: if session['live']:
# Check if we should group the session, select the last guid from the user within the last day # Check if we should group the session, select the last guid from the user within the last day
query = "SELECT session_history.id, session_history_metadata.guid, session_history.reference_id " \ query = "SELECT session_history.id, session_history_metadata.guid, session_history.reference_id " \
@ -495,7 +498,7 @@ class ActivityProcessor(object):
args = [last_id, session['user_id']] args = [last_id, session['user_id']]
result = self.db.select(query=query, args=args) result = db.select(query=query, args=args)
if len(result) > 0: if len(result) > 0:
new_session = {'id': last_id, new_session = {'id': last_id,
@ -515,7 +518,7 @@ class ActivityProcessor(object):
args = [last_id, session['user_id'], session['rating_key']] args = [last_id, session['user_id'], session['rating_key']]
result = self.db.select(query=query, args=args) result = db.select(query=query, args=args)
if len(result) > 1: if len(result) > 1:
new_session = {'id': result[0]['id'], new_session = {'id': result[0]['id'],
@ -558,9 +561,10 @@ class ActivityProcessor(object):
logger.debug("Tautulli ActivityProcessor :: Not grouping history for sessionKey %s", session['session_key']) logger.debug("Tautulli ActivityProcessor :: Not grouping history for sessionKey %s", session['session_key'])
args = [last_id, last_id] args = [last_id, last_id]
self.db.action(query=query, args=args) db.action(query=query, args=args)
def get_sessions(self, user_id=None, ip_address=None): def get_sessions(self, user_id=None, ip_address=None):
db = database.MonitorDatabase()
query = "SELECT * FROM sessions" query = "SELECT * FROM sessions"
args = [] args = []
@ -569,12 +573,13 @@ class ActivityProcessor(object):
query += " WHERE user_id = ?" + ip query += " WHERE user_id = ?" + ip
args.append(user_id) args.append(user_id)
sessions = self.db.select(query, args) sessions = db.select(query, args)
return sessions return sessions
def get_session_by_key(self, session_key=None): def get_session_by_key(self, session_key=None):
db = database.MonitorDatabase()
if str(session_key).isdigit(): if str(session_key).isdigit():
session = self.db.select_single("SELECT * FROM sessions " session = db.select_single("SELECT * FROM sessions "
"WHERE session_key = ? ", "WHERE session_key = ? ",
args=[session_key]) args=[session_key])
if session: if session:
@ -583,8 +588,9 @@ class ActivityProcessor(object):
return None return None
def get_session_by_id(self, session_id=None): def get_session_by_id(self, session_id=None):
db = database.MonitorDatabase()
if session_id: if session_id:
session = self.db.select_single("SELECT * FROM sessions " session = db.select_single("SELECT * FROM sessions "
"WHERE session_id = ? ", "WHERE session_id = ? ",
args=[session_id]) args=[session_id])
if session: if session:
@ -593,6 +599,7 @@ class ActivityProcessor(object):
return None return None
def set_session_state(self, session_key=None, state=None, **kwargs): def set_session_state(self, session_key=None, state=None, **kwargs):
db = database.MonitorDatabase()
if str(session_key).isdigit(): if str(session_key).isdigit():
values = {} values = {}
@ -603,21 +610,23 @@ class ActivityProcessor(object):
values[k] = v values[k] = v
keys = {'session_key': session_key} keys = {'session_key': session_key}
result = self.db.upsert('sessions', values, keys) result = db.upsert('sessions', values, keys)
return result return result
return None return None
def delete_session(self, session_key=None, row_id=None): def delete_session(self, session_key=None, row_id=None):
db = database.MonitorDatabase()
if str(session_key).isdigit(): if str(session_key).isdigit():
self.db.action("DELETE FROM sessions WHERE session_key = ?", [session_key]) db.action("DELETE FROM sessions WHERE session_key = ?", [session_key])
elif str(row_id).isdigit(): elif str(row_id).isdigit():
self.db.action("DELETE FROM sessions WHERE id = ?", [row_id]) db.action("DELETE FROM sessions WHERE id = ?", [row_id])
def set_session_last_paused(self, session_key=None, timestamp=None): def set_session_last_paused(self, session_key=None, timestamp=None):
db = database.MonitorDatabase()
if str(session_key).isdigit(): if str(session_key).isdigit():
result = self.db.select("SELECT last_paused, paused_counter " result = db.select("SELECT last_paused, paused_counter "
"FROM sessions " "FROM sessions "
"WHERE session_key = ?", args=[session_key]) "WHERE session_key = ?", args=[session_key])
@ -636,17 +645,19 @@ class ActivityProcessor(object):
values['paused_counter'] = paused_counter values['paused_counter'] = paused_counter
keys = {'session_key': session_key} keys = {'session_key': session_key}
self.db.upsert('sessions', values, keys) db.upsert('sessions', values, keys)
def increment_session_buffer_count(self, session_key=None): def increment_session_buffer_count(self, session_key=None):
db = database.MonitorDatabase()
if str(session_key).isdigit(): if str(session_key).isdigit():
self.db.action("UPDATE sessions SET buffer_count = buffer_count + 1 " db.action("UPDATE sessions SET buffer_count = buffer_count + 1 "
"WHERE session_key = ?", "WHERE session_key = ?",
[session_key]) [session_key])
def get_session_buffer_count(self, session_key=None): def get_session_buffer_count(self, session_key=None):
db = database.MonitorDatabase()
if str(session_key).isdigit(): if str(session_key).isdigit():
buffer_count = self.db.select_single("SELECT buffer_count " buffer_count = db.select_single("SELECT buffer_count "
"FROM sessions " "FROM sessions "
"WHERE session_key = ?", "WHERE session_key = ?",
[session_key]) [session_key])
@ -656,14 +667,16 @@ class ActivityProcessor(object):
return 0 return 0
def set_session_buffer_trigger_time(self, session_key=None): def set_session_buffer_trigger_time(self, session_key=None):
db = database.MonitorDatabase()
if str(session_key).isdigit(): if str(session_key).isdigit():
self.db.action("UPDATE sessions SET buffer_last_triggered = strftime('%s', 'now') " db.action("UPDATE sessions SET buffer_last_triggered = strftime('%s', 'now') "
"WHERE session_key = ?", "WHERE session_key = ?",
[session_key]) [session_key])
def get_session_buffer_trigger_time(self, session_key=None): def get_session_buffer_trigger_time(self, session_key=None):
db = database.MonitorDatabase()
if str(session_key).isdigit(): if str(session_key).isdigit():
last_time = self.db.select_single("SELECT buffer_last_triggered " last_time = db.select_single("SELECT buffer_last_triggered "
"FROM sessions " "FROM sessions "
"WHERE session_key = ?", "WHERE session_key = ?",
[session_key]) [session_key])
@ -673,37 +686,43 @@ class ActivityProcessor(object):
return None return None
def set_temp_stopped(self): def set_temp_stopped(self):
db = database.MonitorDatabase()
stopped_time = helpers.timestamp() stopped_time = helpers.timestamp()
self.db.action("UPDATE sessions SET stopped = ?", [stopped_time]) db.action("UPDATE sessions SET stopped = ?", [stopped_time])
def increment_write_attempts(self, session_key=None): def increment_write_attempts(self, session_key=None):
db = database.MonitorDatabase()
if str(session_key).isdigit(): if str(session_key).isdigit():
session = self.get_session_by_key(session_key=session_key) session = self.get_session_by_key(session_key=session_key)
self.db.action("UPDATE sessions SET write_attempts = ? WHERE session_key = ?", db.action("UPDATE sessions SET write_attempts = ? WHERE session_key = ?",
[session['write_attempts'] + 1, session_key]) [session['write_attempts'] + 1, session_key])
def set_marker(self, session_key=None, marker_idx=None, marker_type=None): def set_marker(self, session_key=None, marker_idx=None, marker_type=None):
db = database.MonitorDatabase()
marker_args = [ marker_args = [
int(marker_type == 'intro'), int(marker_type == 'intro'),
int(marker_type == 'commercial'), int(marker_type == 'commercial'),
int(marker_type == 'credits') int(marker_type == 'credits')
] ]
self.db.action("UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? " db.action("UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? "
"WHERE session_key = ?", "WHERE session_key = ?",
marker_args + [marker_idx, session_key]) marker_args + [marker_idx, session_key])
def set_watched(self, session_key=None): def set_watched(self, session_key=None):
self.db.action("UPDATE sessions SET watched = ? " db = database.MonitorDatabase()
db.action("UPDATE sessions SET watched = ? "
"WHERE session_key = ?", "WHERE session_key = ?",
[1, session_key]) [1, session_key])
def write_continued_session(self, user_id=None, machine_id=None, media_type=None, stopped=None): def write_continued_session(self, user_id=None, machine_id=None, media_type=None, stopped=None):
db = database.MonitorDatabase()
keys = {'user_id': user_id, 'machine_id': machine_id, 'media_type': media_type} keys = {'user_id': user_id, 'machine_id': machine_id, 'media_type': media_type}
values = {'stopped': stopped} values = {'stopped': stopped}
self.db.upsert(table_name='sessions_continued', key_dict=keys, value_dict=values) db.upsert(table_name='sessions_continued', key_dict=keys, value_dict=values)
def is_initial_stream(self, user_id=None, machine_id=None, media_type=None, started=None): def is_initial_stream(self, user_id=None, machine_id=None, media_type=None, started=None):
last_session = self.db.select_single("SELECT stopped " db = database.MonitorDatabase()
last_session = db.select_single("SELECT stopped "
"FROM sessions_continued " "FROM sessions_continued "
"WHERE user_id = ? AND machine_id = ? AND media_type = ? " "WHERE user_id = ? AND machine_id = ? AND media_type = ? "
"ORDER BY stopped DESC", "ORDER BY stopped DESC",
@ -717,11 +736,12 @@ class ActivityProcessor(object):
logger.info("Tautulli ActivityProcessor :: Regrouping session history...") logger.info("Tautulli ActivityProcessor :: Regrouping session history...")
db = database.MonitorDatabase()
query = ( query = (
"SELECT * FROM session_history " "SELECT * FROM session_history "
"JOIN session_history_metadata ON session_history.id = session_history_metadata.id" "JOIN session_history_metadata ON session_history.id = session_history_metadata.id"
) )
results = self.db.select(query) results = db.select(query)
count = len(results) count = len(results)
progress = 0 progress = 0
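
Context for the diff above: every ActivityProcessor method now opens its own short-lived database.MonitorDatabase() instead of reusing a single self.db created in __init__, which is the fix for the database-closing memory leak noted in the v2.14.5 changelog (#2404). Below is a minimal, runnable sketch of that before/after pattern; the MonitorDatabase stand-in is an assumption for illustration only, not the real plexpy.database class.

    import sqlite3

    DB_FILE = ':memory:'  # illustration only; Tautulli uses its tautulli.db file

    class MonitorDatabase(object):
        # Illustrative stand-in for plexpy.database.MonitorDatabase.
        def __init__(self, db_file=DB_FILE):
            self.connection = sqlite3.connect(db_file, timeout=20)

        def select(self, query, args=None):
            with self.connection:
                return self.connection.execute(query, args or []).fetchall()

    class ActivityProcessor(object):
        # Before: one connection created in __init__ and held for the object's
        # whole lifetime:
        #     def __init__(self):
        #         self.db = MonitorDatabase()
        #
        # After: each method opens its own short-lived connection, mirroring the
        # diff above, so the handle can be released as soon as the call returns.
        def get_sessions(self):
            db = MonitorDatabase()
            return db.select("SELECT name FROM sqlite_master WHERE type = 'table'")

    print(ActivityProcessor().get_sessions())   # [] on an empty in-memory database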

View file

@@ -170,6 +170,7 @@ AUDIO_CODEC_OVERRIDES = {
 VIDEO_RESOLUTION_OVERRIDES = {
     'sd': 'SD',
+    '2k': '2k',
     '4k': '4k'
 }

View file

@@ -24,8 +24,6 @@ from cloudinary.utils import cloudinary_url
 from collections import OrderedDict
 from datetime import date, datetime, timezone
 from functools import reduce, wraps
-import hashlib
-import imghdr
 from itertools import groupby
 from future.moves.itertools import islice, zip_longest
 from ipaddress import ip_address, ip_network, IPv4Address
@@ -272,7 +270,8 @@ def human_duration(ms, sig='dhm', units='ms', return_seconds=300000):
     if return_seconds and ms < return_seconds:
         sig = 'dhms'
 
-    ms = ms * factors[units]
+    r = factors[sig[-1]]
+    ms = round(ms * factors[units] / r) * r
 
     d, h = divmod(ms, factors['d'])
     h, m = divmod(h, factors['h'])
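
The rewritten line rounds the duration to the smallest unit that will actually be displayed (sig[-1]) before the divmod chain splits it into days/hours/minutes, matching the "Round runtime before converting to human duration" changelog entry. A small sketch of just that rounding step; the factors values here are assumed standard millisecond conversions, not copied from helpers.py.

    # Assumed unit-to-millisecond factors for illustration.
    factors = {'d': 86400000, 'h': 3600000, 'm': 60000, 's': 1000, 'ms': 1}

    def round_to_displayed_unit(ms, sig='dhm', units='ms'):
        r = factors[sig[-1]]                    # smallest unit that will be shown
        return round(ms * factors[units] / r) * r

    # A 59m42s runtime previously truncated to "59 mins"; rounding first
    # carries it up to exactly one hour before the divmod chain runs.
    print(round_to_displayed_unit(3582000))     # 3600000 ms -> rendered as "1 hr"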
@@ -942,39 +941,6 @@ def cloudinary_transform(rating_key=None, width=1000, height=1500, opacity=100,
     return url
 
 
-def cache_image(url, image=None):
-    """
-    Saves an image to the cache directory.
-    If no image is provided, tries to return the image from the cache directory.
-    """
-    # Create image directory if it doesn't exist
-    imgdir = os.path.join(plexpy.CONFIG.CACHE_DIR, 'images/')
-    if not os.path.exists(imgdir):
-        logger.debug("Tautulli Helpers :: Creating image cache directory at %s" % imgdir)
-        os.makedirs(imgdir)
-
-    # Create a hash of the url to use as the filename
-    imghash = hashlib.md5(url).hexdigest()
-    imagefile = os.path.join(imgdir, imghash)
-
-    # If an image is provided, save it to the cache directory
-    if image:
-        try:
-            with open(imagefile, 'wb') as cache_file:
-                cache_file.write(image)
-        except IOError as e:
-            logger.error("Tautulli Helpers :: Failed to cache image %s: %s" % (imagefile, e))
-
-    # Try to return the image from the cache directory
-    if os.path.isfile(imagefile):
-        imagetype = 'image/' + imghdr.what(os.path.abspath(imagefile))
-    else:
-        imagefile = None
-        imagetype = 'image/jpeg'
-
-    return imagefile, imagetype
-
-
 def build_datatables_json(kwargs, dt_columns, default_sort_col=None):
     """ Builds datatables json data

View file

@@ -17,6 +17,7 @@
 
 from io import open
 import os
+import shlex
 
 from apscheduler.triggers.cron import CronTrigger
 import email.utils
@@ -58,25 +59,36 @@ def schedule_newsletters(newsletter_id=None):
 def schedule_newsletter_job(newsletter_job_id, name='', func=None, remove_job=False, args=None, cron=None):
-    # apscheduler day_of_week uses 0-6 = mon-sun
     if cron:
-        cron = cron.split(' ')
-        cron[4] = str((int(cron[4]) - 1) % 7) if cron[4].isdigit() else cron[4]
-        cron = ' '.join(cron)
+        values = shlex.split(cron)
+        # apscheduler day_of_week uses 0-6 = mon-sun
+        values[4] = str((int(values[4]) - 1) % 7) if values[4].isdigit() else values[4]
 
     if NEWSLETTER_SCHED.get_job(newsletter_job_id):
         if remove_job:
             NEWSLETTER_SCHED.remove_job(newsletter_job_id)
             logger.info("Tautulli NewsletterHandler :: Removed scheduled newsletter: %s" % name)
         else:
-            NEWSLETTER_SCHED.reschedule_job(
-                newsletter_job_id, args=args, trigger=CronTrigger.from_crontab(cron))
-            logger.info("Tautulli NewsletterHandler :: Re-scheduled newsletter: %s" % name)
+            try:
+                NEWSLETTER_SCHED.reschedule_job(
+                    newsletter_job_id, args=args, trigger=CronTrigger(
+                        minute=values[0], hour=values[1], day=values[2], month=values[3], day_of_week=values[4]
+                    )
+                )
+                logger.info("Tautulli NewsletterHandler :: Re-scheduled newsletter: %s" % name)
+            except ValueError as e:
+                logger.error("Tautulli NewsletterHandler :: Failed to re-schedule newsletter: %s" % e)
     elif not remove_job:
-        NEWSLETTER_SCHED.add_job(
-            func, args=args, id=newsletter_job_id, trigger=CronTrigger.from_crontab(cron),
-            misfire_grace_time=None)
-        logger.info("Tautulli NewsletterHandler :: Scheduled newsletter: %s" % name)
+        try:
+            NEWSLETTER_SCHED.add_job(
+                func, args=args, id=newsletter_job_id, trigger=CronTrigger(
+                    minute=values[0], hour=values[1], day=values[2], month=values[3], day_of_week=values[4]
+                ),
+                misfire_grace_time=None
+            )
+            logger.info("Tautulli NewsletterHandler :: Scheduled newsletter: %s" % name)
        except ValueError as e:
+            logger.error("Tautulli NewsletterHandler :: Failed to schedule newsletter: %s" % e)
 
 
 def notify(newsletter_id=None, notify_action=None, **kwargs):
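
The hunk above swaps CronTrigger.from_crontab(cron) for an explicit CronTrigger(...) built from the shlex-split fields, which is what lets apscheduler-specific field values (e.g. "last") pass through — the "Support apscheduler compatible cron expressions" changelog entry — and wraps scheduling in try/except so an invalid expression is logged instead of raised. A standalone sketch of the trigger construction, assuming only apscheduler is installed; the helper name below is mine, not Tautulli's.

    import shlex
    from apscheduler.triggers.cron import CronTrigger

    def build_cron_trigger(cron):
        values = shlex.split(cron)
        # apscheduler day_of_week uses 0-6 = mon-sun, while cron uses 0-6 = sun-sat
        values[4] = str((int(values[4]) - 1) % 7) if values[4].isdigit() else values[4]
        try:
            return CronTrigger(minute=values[0], hour=values[1], day=values[2],
                               month=values[3], day_of_week=values[4])
        except ValueError as e:
            print("Invalid cron expression %r: %s" % (cron, e))
            return None

    print(build_cron_trigger('0 6 * * 1'))      # standard cron: Mondays at 06:00
    print(build_cron_trigger('0 8 last * *'))   # apscheduler extension: last day of month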

View file

@@ -586,6 +586,8 @@ class Newsletter(object):
         return parameters
 
     def _build_params(self):
+        from plexpy.notification_handler import CustomArrow
+
         date_format = helpers.momentjs_to_arrow(plexpy.CONFIG.DATE_FORMAT)
 
         if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
@@ -595,8 +597,8 @@ class Newsletter(object):
         parameters = {
             'server_name': helpers.pms_name(),
-            'start_date': self.start_date.format(date_format),
-            'end_date': self.end_date.format(date_format),
+            'start_date': CustomArrow(self.start_date, date_format),
+            'end_date': CustomArrow(self.end_date, date_format),
             'current_year': self.start_date.year,
             'current_month': self.start_date.month,
             'current_day': self.start_date.day,
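
start_date and end_date are now passed as CustomArrow objects (imported from plexpy.notification_handler) rather than pre-formatted strings, so a newsletter template can apply its own format spec to the parameter — the "Allow formatting newsletter date parameters" changelog entry. The wrapper below is only an assumed minimal equivalent to show the idea; the real CustomArrow implementation is not part of this diff.

    import arrow

    class CustomArrowSketch(object):
        # Assumed behaviour: render with a default format, but honour an explicit
        # format spec given in the template, e.g. "{start_date:MMM D, YYYY}".
        def __init__(self, value, default_format=''):
            self.value = value                      # an arrow.Arrow instance
            self.default_format = default_format

        def __format__(self, format_spec):
            return self.value.format(format_spec or self.default_format)

    start = CustomArrowSketch(arrow.get('2024-10-01'), 'YYYY-MM-DD')
    print("{start_date}".format(start_date=start))                # 2024-10-01
    print("{start_date:MMM D, YYYY}".format(start_date=start))    # Oct 1, 2024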

View file

@@ -16,4 +16,4 @@
 # along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
 
 PLEXPY_BRANCH = "master"
-PLEXPY_RELEASE_VERSION = "v2.14.4"
+PLEXPY_RELEASE_VERSION = "v2.14.6"

View file

@@ -281,7 +281,7 @@ def check_github(scheduler=False, notify=False, use_cache=False):
                                    'plexpy_update_commit': plexpy.LATEST_VERSION,
                                    'plexpy_update_behind': plexpy.COMMITS_BEHIND})
 
-    elif scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and \
+    if scheduler and plexpy.CONFIG.PLEXPY_AUTO_UPDATE and \
             not plexpy.DOCKER and not plexpy.SNAP and not plexpy.FROZEN:
         logger.info('Running automatic update.')
         plexpy.shutdown(restart=True, update=True)
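
Changing elif to if decouples the automatic update from the branch above it that handles the update notification, which is the "Auto-updater not running" fix: with the chained elif, the update branch was never evaluated whenever the preceding branch ran. A toy illustration of that control-flow difference; the flag names and stub functions are hypothetical, not Tautulli code.

    def send_notification():
        print("notified about available update")

    def run_update():
        print("running automatic update")

    notify_on_update = True      # hypothetical flags for illustration only
    auto_update = True

    # Before (chained elif): the update branch is skipped whenever the
    # notification branch runs.
    if notify_on_update:
        send_notification()
    elif auto_update:
        run_update()             # unreachable while notify_on_update is True

    # After (independent if): the update check is evaluated on its own.
    if notify_on_update:
        send_notification()
    if auto_update:
        run_update()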

View file

@@ -3,7 +3,7 @@ arrow==1.3.0
 backports.zoneinfo==0.2.1;python_version<"3.9"
 beautifulsoup4==4.12.3
 bleach==6.1.0
-certifi==2024.7.4
+certifi==2024.8.30
 cheroot==10.0.1
 cherrypy==18.10.0
 cloudinary==1.41.0
@@ -17,7 +17,7 @@ html5lib==1.1
 httpagentparser==1.9.5
 idna==3.7
 importlib-metadata==8.5.0
-importlib-resources==6.4.0
+importlib-resources==6.4.5
 git+https://github.com/Tautulli/ipwhois.git@master#egg=ipwhois
 IPy==1.01
 Mako==1.3.5
@@ -26,18 +26,18 @@ musicbrainzngs==0.7.1
 packaging==24.1
 paho-mqtt==2.1.0
 platformdirs==4.2.2
-plexapi==4.15.15
+plexapi==4.15.16
 portend==3.2.0
 profilehooks==1.12.0
 PyJWT==2.9.0
-pyparsing==3.1.2
+pyparsing==3.1.4
 python-dateutil==2.9.0.post0
 python-twitter==3.5
 pytz==2024.1
 requests==2.32.3
 requests-oauthlib==2.0.0
 rumps==0.4.0; platform_system == "Darwin"
-simplejson==3.19.2
+simplejson==3.19.3
 six==1.16.0
 tempora==5.7.0
 tokenize-rt==6.0.0

View file

@@ -26,14 +26,14 @@ parts:
     stage-packages:
       - python3
       - python3-openssl
-      - python3-pycryptodome
+      - python3-cryptography
       - python3-setuptools
       - python3-pkg-resources
     build-packages:
      - git
       - python3
       - python3-openssl
-      - python3-pycryptodome
+      - python3-cryptography
       - python3-setuptools
       - python3-pkg-resources
     override-pull: |