New: Update Custom Format renaming token to allow excluding specific formats
Closes #6615
parent 86034beccd
commit 6584d95331
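The change lets the {Custom Formats} renaming token take an optional filter after the colon: a comma-separated list of custom format names to include, or, with a leading "-", to exclude. Below is a minimal, self-contained sketch of that filtering behaviour (not the production FileNameBuilder code): it works on plain format-name strings instead of Sonarr's CustomFormat objects, and the example format names INTERNAL, AMZN and NAME WITH SPACES are simply the ones used by the test cases in this commit.

// Sketch of the new {Custom Formats:<filter>} semantics, assuming plain string
// format names (the real code filters CustomFormat objects by Name).
using System;
using System.Collections.Generic;
using System.Linq;

public static class CustomFormatsTokenSketch
{
    public static string Render(IEnumerable<string> formatNames, string filter)
    {
        var filtered = formatNames;

        if (!string.IsNullOrWhiteSpace(filter))
        {
            if (filter.StartsWith("-"))
            {
                // "-A,B" excludes formats A and B
                var excluded = filter.Substring(1).Split(',');
                filtered = formatNames.Where(name => !excluded.Contains(name));
            }
            else
            {
                // "A,B" keeps only formats A and B
                var included = filter.Split(',');
                filtered = formatNames.Where(name => included.Contains(name));
            }
        }

        return string.Join(" ", filtered);
    }

    public static void Main()
    {
        var formats = new List<string> { "INTERNAL", "AMZN", "NAME WITH SPACES" };

        Console.WriteLine(Render(formats, null));                        // INTERNAL AMZN NAME WITH SPACES
        Console.WriteLine(Render(formats, "-INTERNAL"));                 // AMZN NAME WITH SPACES
        Console.WriteLine(Render(formats, "INTERNAL,NAME WITH SPACES")); // INTERNAL NAME WITH SPACES
    }
}

The malformed tokens covered by the second test ({Custom Formats:-} and {Custom Formats:}) never reach this filtering: the customFormat group in TitleRegex requires at least one character and may not end in "-" or a space, so those tokens simply fail to match and are left in the file name unreplaced.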
@@ -93,6 +93,30 @@ namespace NzbDrone.Core.Test.OrganizerTests.FileNameBuilderTests
                    .Should().Be(expected);
         }
 
+        [TestCase("{Custom Formats:-INTERNAL}", "AMZN NAME WITH SPACES")]
+        [TestCase("{Custom Formats:-NAME WITH SPACES}", "INTERNAL AMZN")]
+        [TestCase("{Custom Formats:-INTERNAL,NAME WITH SPACES}", "AMZN")]
+        [TestCase("{Custom Formats:INTERNAL}", "INTERNAL")]
+        [TestCase("{Custom Formats:NAME WITH SPACES}", "NAME WITH SPACES")]
+        [TestCase("{Custom Formats:INTERNAL,NAME WITH SPACES}", "INTERNAL NAME WITH SPACES")]
+        public void should_replace_custom_formats_with_filtered_names(string format, string expected)
+        {
+            _namingConfig.StandardEpisodeFormat = format;
+
+            Subject.BuildFileName(new List<Episode> { _episode1 }, _series, _episodeFile, customFormats: _customFormats)
+                   .Should().Be(expected);
+        }
+
+        [TestCase("{Custom Formats:-}", "{Custom Formats:-}")]
+        [TestCase("{Custom Formats:}", "{Custom Formats:}")]
+        public void should_not_replace_custom_formats_due_to_invalid_token(string format, string expected)
+        {
+            _namingConfig.StandardEpisodeFormat = format;
+
+            Subject.BuildFileName(new List<Episode> { _episode1 }, _series, _episodeFile, customFormats: _customFormats)
+                   .Should().Be(expected);
+        }
+
         [TestCase("{Custom Format}", "")]
         [TestCase("{Custom Format:INTERNAL}", "INTERNAL")]
         [TestCase("{Custom Format:AMZN}", "AMZN")]
@@ -47,7 +47,7 @@ namespace NzbDrone.Core.Organizer
         private readonly ICached<bool> _patternHasEpisodeIdentifierCache;
         private readonly Logger _logger;
 
-        private static readonly Regex TitleRegex = new Regex(@"(?<escaped>\{\{|\}\})|\{(?<prefix>[- ._\[(]*)(?<token>(?:[a-z0-9]+)(?:(?<separator>[- ._]+)(?:[a-z0-9]+))?)(?::(?<customFormat>[ a-z0-9+-]+(?<![- ])))?(?<suffix>[- ._)\]]*)\}",
+        private static readonly Regex TitleRegex = new Regex(@"(?<escaped>\{\{|\}\})|\{(?<prefix>[- ._\[(]*)(?<token>(?:[a-z0-9]+)(?:(?<separator>[- ._]+)(?:[a-z0-9]+))?)(?::(?<customFormat>[ ,a-z0-9+-]+(?<![- ])))?(?<suffix>[- ._)\]]*)\}",
                                                              RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
 
         private static readonly Regex EpisodeRegex = new Regex(@"(?<episode>\{episode(?:\:0+)?})",
@@ -698,7 +698,7 @@ namespace NzbDrone.Core.Organizer
                 customFormats = _formatCalculator.ParseCustomFormat(episodeFile, series);
             }
 
-            tokenHandlers["{Custom Formats}"] = m => string.Join(" ", customFormats.Where(x => x.IncludeCustomFormatWhenRenaming));
+            tokenHandlers["{Custom Formats}"] = m => GetCustomFormatsToken(customFormats, m.CustomFormat);
             tokenHandlers["{Custom Format}"] = m =>
             {
                 if (m.CustomFormat.IsNullOrWhiteSpace())
@@ -717,6 +717,29 @@ namespace NzbDrone.Core.Organizer
             tokenHandlers["{TvMazeId}"] = m => series.TvMazeId > 0 ? series.TvMazeId.ToString() : string.Empty;
         }
 
+        private string GetCustomFormatsToken(List<CustomFormat> customFormats, string filter)
+        {
+            var tokens = customFormats.Where(x => x.IncludeCustomFormatWhenRenaming);
+
+            var filteredTokens = tokens;
+
+            if (filter.IsNotNullOrWhiteSpace())
+            {
+                if (filter.StartsWith("-"))
+                {
+                    var splitFilter = filter.Substring(1).Split(',');
+                    filteredTokens = tokens.Where(c => !splitFilter.Contains(c.Name)).ToList();
+                }
+                else
+                {
+                    var splitFilter = filter.Split(',');
+                    filteredTokens = tokens.Where(c => splitFilter.Contains(c.Name)).ToList();
+                }
+            }
+
+            return string.Join(" ", filteredTokens);
+        }
+
         private string GetLanguagesToken(List<string> mediaInfoLanguages, string filter, bool skipEnglishOnly, bool quoted)
         {
             var tokens = new List<string>();