Fixed: Refactored the Indexer architecture to support non-rss indexers.
This commit is contained in:
parent 22c9bc402f
commit 5e62c2335f
@@ -0,0 +1,26 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace NzbDrone.Common.Http
+{
+    public sealed class HttpAccept
+    {
+        public static readonly HttpAccept Rss = new HttpAccept("application/rss+xml, text/rss+xml, text/xml");
+        public static readonly HttpAccept Json = new HttpAccept("application/json");
+        public static readonly HttpAccept Html = new HttpAccept("text/html");
+
+        public String Value { get; private set; }
+
+        public HttpAccept(String accept)
+        {
+            Value = accept;
+        }
+
+        public override string ToString()
+        {
+            return Value;
+        }
+    }
+}
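A brief aside (not part of the commit itself): combined with the optional HttpAccept parameter added to HttpRequest further down in this diff, a caller can pin the Accept header in one line. A minimal sketch:

// Hypothetical usage of the new HttpAccept constants with the HttpRequest constructor below:
var request = new HttpRequest("http://fanzub.com/rss/?cat=anime", HttpAccept.Rss);
// request.Headers.Accept is now "application/rss+xml, text/rss+xml, text/xml"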
@@ -46,6 +46,7 @@ namespace NzbDrone.Common.Http
             webRequest.Method = request.Method.ToString();
             webRequest.UserAgent = _userAgent;
             webRequest.KeepAlive = false;
+            webRequest.AllowAutoRedirect = request.AllowAutoRedirect;

             if (!RuntimeInfoBase.IsProduction)
             {
@@ -61,8 +62,7 @@ namespace NzbDrone.Common.Http

             if (!request.Body.IsNullOrWhiteSpace())
             {
-                var bytes = new byte[request.Body.Length * sizeof(char)];
-                Buffer.BlockCopy(request.Body.ToCharArray(), 0, bytes, 0, bytes.Length);
+                var bytes = request.Headers.GetEncodingFromContentType().GetBytes(request.Body.ToCharArray());

                 webRequest.ContentLength = bytes.Length;
                 using (var writeStream = webRequest.GetRequestStream())
@@ -14,6 +14,12 @@ namespace NzbDrone.Common.Http
             Response = response;
         }

+        public HttpException(HttpResponse response)
+            : this(response.Request, response)
+        {
+
+        }
+
         public override string ToString()
         {
             if (Response != null)
@@ -1,6 +1,8 @@
 using System;
+using System.Linq;
 using System.Collections.Generic;
 using System.Collections.Specialized;
+using System.Text;

 namespace NzbDrone.Common.Http
 {
@@ -66,5 +68,35 @@ namespace NzbDrone.Common.Http
                 this["Accept"] = value;
             }
         }
+
+        public Encoding GetEncodingFromContentType()
+        {
+            Encoding encoding = null;
+
+            if (ContentType.IsNotNullOrWhiteSpace())
+            {
+                var charset = ContentType.ToLowerInvariant()
+                                         .Split(';', '=', ' ')
+                                         .SkipWhile(v => v != "charset")
+                                         .Skip(1).FirstOrDefault();
+
+                if (charset.IsNotNullOrWhiteSpace())
+                {
+                    encoding = Encoding.GetEncoding(charset);
+                }
+            }
+
+            if (encoding == null)
+            {
+                // TODO: Find encoding by Byte order mask.
+            }
+
+            if (encoding == null)
+            {
+                encoding = Encoding.UTF8;
+            }
+
+            return encoding;
+        }
     }
 }
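For illustration (not part of the commit), tracing GetEncodingFromContentType with a hypothetical header value shows how the charset token is isolated:

// "text/html; charset=iso-8859-1".ToLowerInvariant().Split(';', '=', ' ')
//     -> ["text/html", "", "charset", "iso-8859-1"]
// .SkipWhile(v => v != "charset").Skip(1).FirstOrDefault() -> "iso-8859-1"
// Encoding.GetEncoding("iso-8859-1") is returned; with no charset present it falls back to UTF-8.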
@@ -6,14 +6,19 @@ namespace NzbDrone.Common.Http
 {
     public class HttpRequest
     {
         private readonly Dictionary<string, string> _segments;

-        public HttpRequest(string url)
+        public HttpRequest(string url, HttpAccept httpAccept = null)
         {
             UriBuilder = new UriBuilder(url);
             Headers = new HttpHeader();
             _segments = new Dictionary<string, string>();
+            AllowAutoRedirect = true;
+
+            if (httpAccept != null)
+            {
+                Headers.Accept = httpAccept.Value;
+            }
         }

         public UriBuilder UriBuilder { get; private set; }
@@ -38,6 +43,7 @@ namespace NzbDrone.Common.Http
         public string Body { get; set; }
         public NetworkCredential NetworkCredential { get; set; }
         public bool SuppressHttpError { get; set; }
+        public bool AllowAutoRedirect { get; set; }

         public override string ToString()
         {
@@ -21,7 +21,7 @@ namespace NzbDrone.Common.Http
         {
             Request = request;
             Headers = headers;
-            ResponseData = Encoding.UTF8.GetBytes(content);
+            ResponseData = Headers.GetEncodingFromContentType().GetBytes(content);
             _content = content;
             StatusCode = statusCode;
         }
@@ -39,7 +39,7 @@ namespace NzbDrone.Common.Http
         {
             if (_content == null)
             {
-                _content = GetStringFromResponseData();
+                _content = Headers.GetEncodingFromContentType().GetString(ResponseData);
             }

             return _content;
@@ -66,36 +66,6 @@ namespace NzbDrone.Common.Http

             return result;
         }
-
-        protected virtual String GetStringFromResponseData()
-        {
-            Encoding encoding = null;
-
-            if (Headers.ContentType.IsNotNullOrWhiteSpace())
-            {
-                var charset = Headers.ContentType.ToLowerInvariant()
-                                     .Split(';', '=', ' ')
-                                     .SkipWhile(v => v != "charset")
-                                     .Skip(1).FirstOrDefault();
-
-                if (charset.IsNotNullOrWhiteSpace())
-                {
-                    encoding = Encoding.GetEncoding(charset);
-                }
-            }
-
-            if (encoding == null)
-            {
-                // TODO: Find encoding by Byte order mask.
-            }
-
-            if (encoding == null)
-            {
-                encoding = Encoding.UTF8;
-            }
-
-            return encoding.GetString(ResponseData);
-        }
     }
 }
@@ -114,6 +114,7 @@
     <Compile Include="Http\GZipWebClient.cs">
       <SubType>Component</SubType>
     </Compile>
+    <Compile Include="Http\HttpAccept.cs" />
     <Compile Include="Http\HttpClient.cs" />
     <Compile Include="Http\HttpException.cs" />
     <Compile Include="Http\HttpHeader.cs" />
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="utf-8"?>
+<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
+  <channel>
+    <title>Anime :: Fanzub</title>
+    <link>http://www.fanzub.com/</link>
+    <description>A Usenet Search Engine for Japanese Media</description>
+    <language>en-us</language>
+    <atom:link href="http://fanzub.com/rss?cat=anime" rel="self" type="application/rss+xml" />
+    <item>
+      <title>[Vivid] Hanayamata - 10 [A33D6606]</title>
+      <link>http://fanzub.com/nzb/296464</link>
+      <description>&lt;i&gt;Age&lt;/i&gt;: 0 days&lt;br /&gt;&lt;i&gt;Size&lt;/i&gt;: 530.48 MiB&lt;br /&gt;&lt;i&gt;Parts&lt;/i&gt;: 100%&lt;br /&gt;&lt;i&gt;Files&lt;/i&gt;: 1 other, 8 par2&lt;br /&gt;&lt;i&gt;Subject&lt;/i&gt;: [9/9] [Vivid] Hanayamata - 10 [A33D6606].vol63+27.par2 (1/28)</description>
+      <category>Anime</category>
+      <pubDate>Sat, 13 Sep 2014 12:56:53 +0000</pubDate>
+      <enclosure url="http://fanzub.com/nzb/296464/Vivid%20Hanayamata%20-%2010.nzb" length="556246858" type="application/x-nzb" />
+      <guid isPermaLink="false">http://fanzub.com/nzb/296464</guid>
+    </item>
+    <item>
+      <title>(Sniper2000) - Pokemon HD - XY 37</title>
+      <link>http://fanzub.com/nzb/296456</link>
+      <description>&lt;i&gt;Age&lt;/i&gt;: 0 days&lt;br /&gt;&lt;i&gt;Size&lt;/i&gt;: 2.79 GiB&lt;br /&gt;&lt;i&gt;Parts&lt;/i&gt;: 100%&lt;br /&gt;&lt;i&gt;Files&lt;/i&gt;: 1 nzb, 1 other, 77 par2, 30 rar&lt;br /&gt;&lt;i&gt;Subject&lt;/i&gt;: (Sniper2000) [108/108] - "XY 37.vol183+176.PAR2"Pokemon HD (1/272)</description>
+      <category>Anime</category>
+      <pubDate>Sat, 13 Sep 2014 12:38:03 +0000</pubDate>
+      <enclosure url="http://fanzub.com/nzb/296456/Sniper2000%20-%20Pokemon%20HD%20-%20XY%2037.nzb" length="2995093986" type="application/x-nzb" />
+      <guid isPermaLink="false">http://fanzub.com/nzb/296456</guid>
+    </item>
+    <item>
+      <title>[HorribleSubs] Kindaichi Case Files R - 23 [480p].mkv</title>
+      <link>http://fanzub.com/nzb/296472</link>
+      <description>&lt;i&gt;Age&lt;/i&gt;: 0 days&lt;br /&gt;&lt;i&gt;Size&lt;/i&gt;: 153.87 MiB&lt;br /&gt;&lt;i&gt;Parts&lt;/i&gt;: 100%&lt;br /&gt;&lt;i&gt;Files&lt;/i&gt;: 7 par2, 6 split&lt;br /&gt;&lt;i&gt;Subject&lt;/i&gt;: [HorribleSubs] Kindaichi Case Files R - 23 [480p] [13/13] - "[HorribleSubs] Kindaichi Case Files R - 23 [480p].mkv.vol31+06.par2" yEnc (1/7)</description>
+      <category>Anime</category>
+      <pubDate>Sat, 13 Sep 2014 11:51:59 +0000</pubDate>
+      <enclosure url="http://fanzub.com/nzb/296472/HorribleSubs%20Kindaichi%20Case%20Files%20R%20-%2023%20480p.nzb" length="161341092" type="application/x-nzb" />
+      <guid isPermaLink="false">http://fanzub.com/nzb/296472</guid>
+    </item>
+  </channel>
+</rss>
File diff suppressed because it is too large
@@ -86,7 +86,7 @@ namespace NzbDrone.Core.Test.HealthCheck.Checks
             indexer1.SetupGet(s => s.SupportsRss).Returns(true);
             indexer1.SetupGet(s => s.SupportsSearch).Returns(true);

-            var indexer2 = Mocker.GetMock<Wombles>();
+            var indexer2 = new Moq.Mock<IIndexer>();
             indexer2.SetupGet(s => s.SupportsRss).Returns(true);
             indexer2.SetupGet(s => s.SupportsSearch).Returns(false);

@@ -17,18 +17,19 @@ namespace NzbDrone.Core.Test.IndexerSearchTests
 {
     public class NzbSearchServiceFixture : CoreTest<NzbSearchService>
     {
+        private Mock<IIndexer> _mockIndexer;
         private Series _xemSeries;
         private List<Episode> _xemEpisodes;

         [SetUp]
         public void SetUp()
         {
-            var indexer = Mocker.GetMock<IIndexer>();
-            indexer.SetupGet(s => s.SupportsSearch).Returns(true);
+            _mockIndexer = Mocker.GetMock<IIndexer>();
+            _mockIndexer.SetupGet(s => s.SupportsSearch).Returns(true);

             Mocker.GetMock<IIndexerFactory>()
                   .Setup(s => s.SearchEnabled())
-                  .Returns(new List<IIndexer> { indexer.Object });
+                  .Returns(new List<IIndexer> { _mockIndexer.Object });

             Mocker.GetMock<IMakeDownloadDecision>()
                   .Setup(s => s.GetSearchDecision(It.IsAny<List<Parser.Model.ReleaseInfo>>(), It.IsAny<SearchCriteriaBase>()))
@@ -97,19 +98,16 @@ namespace NzbDrone.Core.Test.IndexerSearchTests
         {
             var result = new List<SearchCriteriaBase>();

-            Mocker.GetMock<IFetchFeedFromIndexers>()
-                  .Setup(v => v.Fetch(It.IsAny<IIndexer>(), It.IsAny<SingleEpisodeSearchCriteria>()))
-                  .Callback<IIndexer, SingleEpisodeSearchCriteria>((i, s) => result.Add(s))
+            _mockIndexer.Setup(v => v.Fetch(It.IsAny<SingleEpisodeSearchCriteria>()))
+                        .Callback<SingleEpisodeSearchCriteria>(s => result.Add(s))
                         .Returns(new List<Parser.Model.ReleaseInfo>());

-            Mocker.GetMock<IFetchFeedFromIndexers>()
-                  .Setup(v => v.Fetch(It.IsAny<IIndexer>(), It.IsAny<SeasonSearchCriteria>()))
-                  .Callback<IIndexer, SeasonSearchCriteria>((i, s) => result.Add(s))
+            _mockIndexer.Setup(v => v.Fetch(It.IsAny<SeasonSearchCriteria>()))
+                        .Callback<SeasonSearchCriteria>(s => result.Add(s))
                         .Returns(new List<Parser.Model.ReleaseInfo>());

-            Mocker.GetMock<IFetchFeedFromIndexers>()
-                  .Setup(v => v.Fetch(It.IsAny<IIndexer>(), It.IsAny<AnimeEpisodeSearchCriteria>()))
-                  .Callback<IIndexer, AnimeEpisodeSearchCriteria>((i, s) => result.Add(s))
+            _mockIndexer.Setup(v => v.Fetch(It.IsAny<AnimeEpisodeSearchCriteria>()))
+                        .Callback<AnimeEpisodeSearchCriteria>(s => result.Add(s))
                         .Returns(new List<Parser.Model.ReleaseInfo>());

             return result;
@@ -0,0 +1,56 @@
+using System;
+using System.Linq;
+using FluentAssertions;
+using Moq;
+using NUnit.Framework;
+using NzbDrone.Common.Http;
+using NzbDrone.Core.Indexers;
+using NzbDrone.Core.Indexers.Animezb;
+using NzbDrone.Core.Parser.Model;
+using NzbDrone.Core.Test.Framework;
+using NzbDrone.Core.ThingiProvider;
+
+namespace NzbDrone.Core.Test.IndexerTests.AnimezbTests
+{
+    [TestFixture]
+    public class AnimezbFixture : CoreTest<Animezb>
+    {
+        [SetUp]
+        public void Setup()
+        {
+            Subject.Definition = new IndexerDefinition()
+                {
+                    Name = "Animezb",
+                    Settings = new NullConfig()
+                };
+
+        }
+
+        [Test]
+        public void should_parse_recent_feed_from_Animezb()
+        {
+            Assert.Inconclusive("Waiting for animezb to get back up.");
+
+            var recentFeed = ReadAllText(@"Files/RSS/Animezb.xml");
+
+            Mocker.GetMock<IHttpClient>()
+                  .Setup(o => o.Execute(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET)))
+                  .Returns<HttpRequest>(r => new HttpResponse(r, new HttpHeader(), recentFeed));
+
+            var releases = Subject.FetchRecent();
+
+            releases.Should().HaveCount(3);
+
+            var releaseInfo = releases.First();
+
+            //releaseInfo.Title.Should().Be("[Vivid] Hanayamata - 10 [A33D6606]");
+            releaseInfo.DownloadProtocol.Should().Be(DownloadProtocol.Usenet);
+            //releaseInfo.DownloadUrl.Should().Be("http://fanzub.com/nzb/296464/Vivid%20Hanayamata%20-%2010.nzb");
+            releaseInfo.InfoUrl.Should().BeNullOrEmpty();
+            releaseInfo.CommentUrl.Should().BeNullOrEmpty();
+            releaseInfo.Indexer.Should().Be(Subject.Definition.Name);
+            //releaseInfo.PublishDate.Should().Be(DateTime.Parse("2014/09/13 12:56:53"));
+            //releaseInfo.Size.Should().Be(556246858);
+        }
+    }
+}
@@ -5,7 +5,7 @@ using NzbDrone.Core.Test.Framework;

 namespace NzbDrone.Core.Test.IndexerTests
 {
-    public class BasicRssParserFixture : CoreTest<RssParserBase>
+    public class BasicRssParserFixture : CoreTest<RssParser>
     {
         [TestCase("5.64 GB", 6055903887)]
         [TestCase("5.54 GiB", 5948529705)]
@@ -16,7 +16,7 @@ namespace NzbDrone.Core.Test.IndexerTests
         [TestCase("845 MB", 886046720)]
         public void parse_size(string sizeString, long expectedSize)
         {
-            var result = RssParserBase.ParseSize(sizeString, true);
+            var result = RssParser.ParseSize(sizeString, true);

             result.Should().Be(expectedSize);
         }
@@ -0,0 +1,53 @@
+using System;
+using System.Linq;
+using FluentAssertions;
+using Moq;
+using NUnit.Framework;
+using NzbDrone.Common.Http;
+using NzbDrone.Core.Indexers;
+using NzbDrone.Core.Indexers.Fanzub;
+using NzbDrone.Core.Parser.Model;
+using NzbDrone.Core.Test.Framework;
+using NzbDrone.Core.ThingiProvider;
+
+namespace NzbDrone.Core.Test.IndexerTests.FanzubTests
+{
+    [TestFixture]
+    public class FanzubFixture : CoreTest<Fanzub>
+    {
+        [SetUp]
+        public void Setup()
+        {
+            Subject.Definition = new IndexerDefinition()
+                {
+                    Name = "Fanzub",
+                    Settings = new NullConfig()
+                };
+        }
+
+        [Test]
+        public void should_parse_recent_feed_from_fanzub()
+        {
+            var recentFeed = ReadAllText(@"Files/RSS/fanzub.xml");
+
+            Mocker.GetMock<IHttpClient>()
+                  .Setup(o => o.Execute(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET)))
+                  .Returns<HttpRequest>(r => new HttpResponse(r, new HttpHeader(), recentFeed));
+
+            var releases = Subject.FetchRecent();
+
+            releases.Should().HaveCount(3);
+
+            var releaseInfo = releases.First();
+
+            releaseInfo.Title.Should().Be("[Vivid] Hanayamata - 10 [A33D6606]");
+            releaseInfo.DownloadProtocol.Should().Be(DownloadProtocol.Usenet);
+            releaseInfo.DownloadUrl.Should().Be("http://fanzub.com/nzb/296464/Vivid%20Hanayamata%20-%2010.nzb");
+            releaseInfo.InfoUrl.Should().BeNullOrEmpty();
+            releaseInfo.CommentUrl.Should().BeNullOrEmpty();
+            releaseInfo.Indexer.Should().Be(Subject.Definition.Name);
+            releaseInfo.PublishDate.Should().Be(DateTime.Parse("2014/09/13 12:56:53"));
+            releaseInfo.Size.Should().Be(556246858);
+        }
+    }
+}
@@ -20,9 +20,9 @@ namespace NzbDrone.Core.Test.IndexerTests
         {
             _indexers = new List<IIndexer>();

-            _indexers.Add(Mocker.GetMock<Newznab>().Object);
-            _indexers.Add(new Omgwtfnzbs());
-            _indexers.Add(new Wombles());
+            _indexers.Add(Mocker.Resolve<Newznab>());
+            _indexers.Add(Mocker.Resolve<Omgwtfnzbs>());
+            _indexers.Add(Mocker.Resolve<Wombles>());

             Mocker.SetConstant<IEnumerable<IIndexer>>(_indexers);
         }
@@ -13,7 +13,7 @@ using System.Linq;
 namespace NzbDrone.Core.Test.IndexerTests.IntegrationTests
 {
     [IntegrationTest]
-    public class IndexerIntegrationTests : CoreTest<FetchFeedService>
+    public class IndexerIntegrationTests : CoreTest<Wombles>
     {
         [SetUp]
         public void SetUp()
@@ -24,15 +24,13 @@ namespace NzbDrone.Core.Test.IndexerTests.IntegrationTests
         [Test]
         public void wombles_rss()
         {
-            var indexer = new Wombles();
-
-            indexer.Definition = new IndexerDefinition
+            Subject.Definition = new IndexerDefinition
             {
                 Name = "Wombles",
                 Settings = NullConfig.Instance
             };

-            var result = Subject.FetchRss(indexer);
+            var result = Subject.FetchRecent();

             ValidateResult(result, skipSize: true, skipInfo: true);
         }
@@ -0,0 +1,57 @@
+using System;
+using System.Linq;
+using FluentAssertions;
+using Moq;
+using NUnit.Framework;
+using NzbDrone.Common.Http;
+using NzbDrone.Core.Indexers;
+using NzbDrone.Core.Indexers.Newznab;
+using NzbDrone.Core.Parser.Model;
+using NzbDrone.Core.Test.Framework;
+using NzbDrone.Core.ThingiProvider;
+
+namespace NzbDrone.Core.Test.IndexerTests.NewznabTests
+{
+    [TestFixture]
+    public class NewznabFixture : CoreTest<Newznab>
+    {
+        [SetUp]
+        public void Setup()
+        {
+            Subject.Definition = new IndexerDefinition()
+                {
+                    Name = "Newznab",
+                    Settings = new NewznabSettings()
+                        {
+                            Url = "http://indexer.local/",
+                            Categories = new Int32[] { 1 }
+                        }
+                };
+        }
+
+        [Test]
+        public void should_parse_recent_feed_from_newznab_nzb_su()
+        {
+            var recentFeed = ReadAllText(@"Files/RSS/newznab_nzb_su.xml");
+
+            Mocker.GetMock<IHttpClient>()
+                  .Setup(o => o.Execute(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET)))
+                  .Returns<HttpRequest>(r => new HttpResponse(r, new HttpHeader(), recentFeed));
+
+            var releases = Subject.FetchRecent();
+
+            releases.Should().HaveCount(100);
+
+            var releaseInfo = releases.First();
+
+            releaseInfo.Title.Should().Be("White.Collar.S03E05.720p.HDTV.X264-DIMENSION");
+            releaseInfo.DownloadProtocol.Should().Be(DownloadProtocol.Usenet);
+            releaseInfo.DownloadUrl.Should().Be("http://nzb.su/getnzb/24967ef4c2e26296c65d3bbfa97aa8fe.nzb&i=37292&r=xxx");
+            releaseInfo.InfoUrl.Should().Be("http://nzb.su/details/24967ef4c2e26296c65d3bbfa97aa8fe");
+            releaseInfo.CommentUrl.Should().Be("http://nzb.su/details/24967ef4c2e26296c65d3bbfa97aa8fe#comments");
+            releaseInfo.Indexer.Should().Be(Subject.Definition.Name);
+            releaseInfo.PublishDate.Should().Be(DateTime.Parse("2012/02/27 16:09:39"));
+            releaseInfo.Size.Should().Be(1183105773);
+        }
+    }
+}
@@ -0,0 +1,123 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using FluentAssertions;
+using NUnit.Framework;
+using NzbDrone.Core.Indexers.Newznab;
+using NzbDrone.Core.IndexerSearch.Definitions;
+using NzbDrone.Core.Test.Framework;
+
+namespace NzbDrone.Core.Test.IndexerTests.NewznabTests
+{
+    public class NewznabRequestGeneratorFixture : CoreTest<NewznabRequestGenerator>
+    {
+        AnimeEpisodeSearchCriteria _animeSearchCriteria;
+
+        [SetUp]
+        public void SetUp()
+        {
+            Subject.Settings = new NewznabSettings()
+            {
+                Url = "http://127.0.0.1:1234/",
+                Categories = new [] { 1, 2 },
+                AnimeCategories = new [] { 3, 4 },
+                ApiKey = "abcd",
+            };
+
+            _animeSearchCriteria = new AnimeEpisodeSearchCriteria()
+            {
+                SceneTitles = new List<String>() { "Monkey+Island" },
+                AbsoluteEpisodeNumber = 100
+            };
+        }
+
+        [Test]
+        public void should_return_one_page_for_feed()
+        {
+            var results = Subject.GetRecentRequests();
+
+            results.Should().HaveCount(1);
+
+            var pages = results.First().Take(10).ToList();
+
+            pages.Should().HaveCount(1);
+        }
+
+        [Test]
+        public void should_use_all_categories_for_feed()
+        {
+            var results = Subject.GetRecentRequests();
+
+            results.Should().HaveCount(1);
+
+            var page = results.First().First();
+
+            page.Url.Query.Should().Contain("&cat=1,2,3,4&");
+        }
+
+        [Test]
+        public void should_not_have_duplicate_categories()
+        {
+            Subject.Settings.Categories = new[] { 1, 2, 3 };
+
+            var results = Subject.GetRecentRequests();
+
+            results.Should().HaveCount(1);
+
+            var page = results.First().First();
+
+            page.Url.Query.Should().Contain("&cat=1,2,3,4&");
+        }
+
+        [Test]
+        public void should_use_only_anime_categories_for_anime_search()
+        {
+            var results = Subject.GetSearchRequests(_animeSearchCriteria);
+
+            results.Should().HaveCount(1);
+
+            var page = results.First().First();
+
+            page.Url.Query.Should().Contain("&cat=3,4&");
+        }
+
+        [Test]
+        public void should_use_mode_search_for_anime()
+        {
+            var results = Subject.GetSearchRequests(_animeSearchCriteria);
+
+            results.Should().HaveCount(1);
+
+            var page = results.First().First();
+
+            page.Url.Query.Should().Contain("?t=search&");
+        }
+
+        [Test]
+        public void should_return_subsequent_pages()
+        {
+            var results = Subject.GetSearchRequests(_animeSearchCriteria);
+
+            results.Should().HaveCount(1);
+
+            var pages = results.First().Take(3).ToList();
+
+            pages[0].Url.Query.Should().Contain("&offset=0&");
+            pages[1].Url.Query.Should().Contain("&offset=100&");
+            pages[2].Url.Query.Should().Contain("&offset=200&");
+        }
+
+        [Test]
+        public void should_not_get_unlimited_pages()
+        {
+            var results = Subject.GetSearchRequests(_animeSearchCriteria);
+
+            results.Should().HaveCount(1);
+
+            var pages = results.First().Take(500).ToList();
+
+            pages.Count.Should().BeLessThan(500);
+        }
+    }
+}
@@ -0,0 +1,57 @@
+using System;
+using System.Linq;
+using FluentAssertions;
+using Moq;
+using NUnit.Framework;
+using NzbDrone.Common.Http;
+using NzbDrone.Core.Indexers;
+using NzbDrone.Core.Indexers.Omgwtfnzbs;
+using NzbDrone.Core.Parser.Model;
+using NzbDrone.Core.Test.Framework;
+using NzbDrone.Core.ThingiProvider;
+
+namespace NzbDrone.Core.Test.IndexerTests.OmgwtfnzbsTests
+{
+    [TestFixture]
+    public class OmgwtfnzbsFixture : CoreTest<Omgwtfnzbs>
+    {
+        [SetUp]
+        public void Setup()
+        {
+            Subject.Definition = new IndexerDefinition()
+                {
+                    Name = "Omgwtfnzbs",
+                    Settings = new OmgwtfnzbsSettings()
+                        {
+                            ApiKey = "xxx",
+                            Username = "me@my.domain"
+                        }
+                };
+        }
+
+        [Test]
+        public void should_parse_recent_feed_from_omgwtfnzbs()
+        {
+            var recentFeed = ReadAllText(@"Files/RSS/omgwtfnzbs.xml");
+
+            Mocker.GetMock<IHttpClient>()
+                  .Setup(o => o.Execute(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET)))
+                  .Returns<HttpRequest>(r => new HttpResponse(r, new HttpHeader(), recentFeed));
+
+            var releases = Subject.FetchRecent();
+
+            releases.Should().HaveCount(100);
+
+            var releaseInfo = releases.First();
+
+            releaseInfo.Title.Should().Be("Stephen.Fry.Gadget.Man.S01E05.HDTV.x264-C4TV");
+            releaseInfo.DownloadProtocol.Should().Be(DownloadProtocol.Usenet);
+            releaseInfo.DownloadUrl.Should().Be("http://api.omgwtfnzbs.org/sn.php?id=OAl4g&user=nzbdrone&api=nzbdrone");
+            releaseInfo.InfoUrl.Should().Be("http://omgwtfnzbs.org/details.php?id=OAl4g");
+            releaseInfo.CommentUrl.Should().BeNullOrEmpty();
+            releaseInfo.Indexer.Should().Be(Subject.Definition.Name);
+            releaseInfo.PublishDate.Should().Be(DateTime.Parse("2012/12/17 23:30:13"));
+            releaseInfo.Size.Should().Be(236822906);
+        }
+    }
+}
@@ -3,10 +3,13 @@ using System.Collections.Generic;
 using FizzWare.NBuilder;
 using FluentValidation.Results;
 using Moq;
 using NLog;
 using NUnit.Framework;
+using NzbDrone.Common.Http;
 using NzbDrone.Core.Configuration;
 using NzbDrone.Core.Indexers;
+using NzbDrone.Core.IndexerSearch.Definitions;
+using NzbDrone.Core.Parser;
 using NzbDrone.Core.Parser.Model;
 using NzbDrone.Core.ThingiProvider;
 using NzbDrone.Core.Tv;
@@ -15,7 +18,7 @@ using NzbDrone.Test.Common;
 namespace NzbDrone.Core.Test.IndexerTests
 {
     [TestFixture]
-    public class SeasonSearchFixture : TestBase<FetchFeedService>
+    public class SeasonSearchFixture : TestBase<TestIndexer>
     {
         private Series _series;

@@ -25,67 +28,68 @@ namespace NzbDrone.Core.Test.IndexerTests
             _series = Builder<Series>.CreateNew().Build();

             Mocker.GetMock<IHttpClient>()
-                  .Setup(o => o.Get(It.IsAny<HttpRequest>()))
+                  .Setup(o => o.Execute(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET)))
                   .Returns<HttpRequest>(r => new HttpResponse(r, new HttpHeader(), "<xml></xml>"));
         }

-        private IndexerBase<TestIndexerSettings> WithIndexer(bool paging, int resultCount)
+        private void WithIndexer(bool paging, int resultCount)
         {
+            var definition = new IndexerDefinition();
+            definition.Name = "Test";
+            Subject.Definition = definition;
+
+            Subject._supportedPageSize = paging ? 100 : 0;
+
+            var requestGenerator = Mocker.GetMock<IIndexerRequestGenerator>();
+            Subject._requestGenerator = requestGenerator.Object;
+
+            var requests = Builder<IndexerRequest>.CreateListOfSize(paging ? 100 : 1)
+                .All()
+                .WithConstructor(() => new IndexerRequest("http://my.feed.local/", HttpAccept.Rss))
+                .With(v => v.HttpRequest.Method = HttpMethod.GET)
+                .Build();
+
+            requestGenerator.Setup(s => s.GetSearchRequests(It.IsAny<SeasonSearchCriteria>()))
+                .Returns(new List<IEnumerable<IndexerRequest>> { requests });
+
+            var parser = Mocker.GetMock<IParseIndexerResponse>();
+            Subject._parser = parser.Object;
+
             var results = Builder<ReleaseInfo>.CreateListOfSize(resultCount)
                 .Build();

-            var indexer = Mocker.GetMock<IndexerBase<TestIndexerSettings>>();
-
-            indexer.Setup(s => s.Parser.Process(It.IsAny<String>(), It.IsAny<String>()))
+            parser.Setup(s => s.ParseResponse(It.IsAny<IndexerResponse>()))
                 .Returns(results);

-            indexer.Setup(s => s.GetSeasonSearchUrls(It.IsAny<List<String>>(), It.IsAny<Int32>(), It.IsAny<Int32>(), It.IsAny<Int32>()))
-                .Returns(new List<string> { "http://www.nzbdrone.com" });
-
-            indexer.SetupGet(s => s.SupportedPageSize).Returns(paging ? 100 : 0);
-
-            var definition = new IndexerDefinition();
-            definition.Name = "Test";
-
-            indexer.SetupGet(s => s.Definition)
-                .Returns(definition);
-
-            return indexer.Object;
         }

         [Test]
         public void should_not_use_offset_if_result_count_is_less_than_90()
         {
-            var indexer = WithIndexer(true, 25);
-            Subject.Fetch(indexer, new SeasonSearchCriteria { Series = _series, SceneTitles = new List<string>{_series.Title} });
+            WithIndexer(true, 25);

-            Mocker.GetMock<IHttpClient>().Verify(v => v.Get(It.IsAny<HttpRequest>()), Times.Once());
+            Subject.Fetch(new SeasonSearchCriteria { Series = _series, SceneTitles = new List<string>{_series.Title} });
+
+            Mocker.GetMock<IHttpClient>().Verify(v => v.Execute(It.IsAny<HttpRequest>()), Times.Once());
         }

         [Test]
         public void should_not_use_offset_for_sites_that_do_not_support_it()
         {
-            var indexer = WithIndexer(false, 125);
-            Subject.Fetch(indexer, new SeasonSearchCriteria { Series = _series, SceneTitles = new List<string> { _series.Title } });
+            WithIndexer(false, 125);

-            Mocker.GetMock<IHttpClient>().Verify(v => v.Get(It.IsAny<HttpRequest>()), Times.Once());
+            Subject.Fetch(new SeasonSearchCriteria { Series = _series, SceneTitles = new List<string> { _series.Title } });
+
+            Mocker.GetMock<IHttpClient>().Verify(v => v.Execute(It.IsAny<HttpRequest>()), Times.Once());
         }

         [Test]
         public void should_not_use_offset_if_its_already_tried_10_times()
         {
-            var indexer = WithIndexer(true, 100);
-            Subject.Fetch(indexer, new SeasonSearchCriteria { Series = _series, SceneTitles = new List<string> { _series.Title } });
+            WithIndexer(true, 100);

-            Mocker.GetMock<IHttpClient>().Verify(v => v.Get(It.IsAny<HttpRequest>()), Times.Exactly(10));
-        }
-    }
+            Subject.Fetch(new SeasonSearchCriteria { Series = _series, SceneTitles = new List<string> { _series.Title } });

-    public class TestIndexerSettings : IProviderConfig
-    {
-        public ValidationResult Validate()
-        {
-            throw new NotImplementedException();
+            Mocker.GetMock<IHttpClient>().Verify(v => v.Execute(It.IsAny<HttpRequest>()), Times.Exactly(10));
         }
     }
 }
@@ -0,0 +1,38 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using NLog;
+using NzbDrone.Common.Http;
+using NzbDrone.Core.Configuration;
+using NzbDrone.Core.Indexers;
+using NzbDrone.Core.Parser;
+
+namespace NzbDrone.Core.Test.IndexerTests
+{
+    public class TestIndexer : HttpIndexerBase<TestIndexerSettings>
+    {
+        public override DownloadProtocol Protocol { get { return DownloadProtocol.Usenet; } }
+
+        public Int32 _supportedPageSize;
+        public override Int32 PageSize { get { return _supportedPageSize; } }
+
+        public TestIndexer(IHttpClient httpClient, IConfigService configService, IParsingService parsingService, Logger logger)
+            : base(httpClient, configService, parsingService, logger)
+        {
+
+        }
+
+        public IIndexerRequestGenerator _requestGenerator;
+        public override IIndexerRequestGenerator GetRequestGenerator()
+        {
+            return _requestGenerator;
+        }
+
+        public IParseIndexerResponse _parser;
+        public override IParseIndexerResponse GetParser()
+        {
+            return _parser;
+        }
+    }
+}
@@ -0,0 +1,17 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using FluentValidation.Results;
+using NzbDrone.Core.ThingiProvider;
+
+namespace NzbDrone.Core.Test.IndexerTests
+{
+    public class TestIndexerSettings : IProviderConfig
+    {
+        public ValidationResult Validate()
+        {
+            throw new NotImplementedException();
+        }
+    }
+}
@@ -174,11 +174,18 @@
     <Compile Include="Housekeeping\Housekeepers\FixFutureRunScheduledTasksFixture.cs" />
     <Compile Include="IndexerSearchTests\NzbSearchServiceFixture.cs" />
     <Compile Include="IndexerSearchTests\SearchDefinitionFixture.cs" />
+    <Compile Include="IndexerTests\AnimezbTests\AnimezbFixture.cs" />
     <Compile Include="IndexerTests\BasicRssParserFixture.cs" />
     <Compile Include="IndexerTests\IndexerServiceFixture.cs" />
     <Compile Include="IndexerTests\IntegrationTests\IndexerIntegrationTests.cs" />
+    <Compile Include="IndexerTests\NewznabTests\NewznabFixture.cs" />
+    <Compile Include="IndexerTests\NewznabTests\NewznabRequestGeneratorFixture.cs" />
     <Compile Include="IndexerTests\NewznabTests\NewznabSettingFixture.cs" />
+    <Compile Include="IndexerTests\FanzubTests\FanzubFixture.cs" />
+    <Compile Include="IndexerTests\OmgwtfnzbsTests\OmgwtfnzbsFixture.cs" />
     <Compile Include="IndexerTests\SeasonSearchFixture.cs" />
+    <Compile Include="IndexerTests\TestIndexer.cs" />
+    <Compile Include="IndexerTests\TestIndexerSettings.cs" />
     <Compile Include="IndexerTests\XElementExtensionsFixture.cs" />
     <Compile Include="InstrumentationTests\DatabaseTargetFixture.cs" />
     <Compile Include="JobTests\JobRepositoryFixture.cs" />
@@ -317,6 +324,10 @@
       <Link>sqlite3.dll</Link>
       <CopyToOutputDirectory>Always</CopyToOutputDirectory>
     </Content>
+    <Content Include="Files\RSS\fanzub.xml">
+      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
+    </Content>
     <Content Include="License.txt" />
     <None Include="..\NzbDrone.Test.Common\App.config">
       <Link>App.config</Link>
     </None>
@@ -348,7 +359,7 @@
     <Content Include="Files\RSS\filesharingtalk.xml">
       <CopyToOutputDirectory>Always</CopyToOutputDirectory>
     </Content>
-    <Content Include="Files\RSS\newznab.xml">
+    <Content Include="Files\RSS\newznab_nzb_su.xml">
       <CopyToOutputDirectory>Always</CopyToOutputDirectory>
     </Content>
     <Content Include="Files\RSS\nzbclub.xml">
@@ -25,7 +25,6 @@ namespace NzbDrone.Core.IndexerSearch
     public class NzbSearchService : ISearchForNzb
     {
         private readonly IIndexerFactory _indexerFactory;
-        private readonly IFetchFeedFromIndexers _feedFetcher;
         private readonly ISceneMappingService _sceneMapping;
         private readonly ISeriesService _seriesService;
         private readonly IEpisodeService _episodeService;
@@ -33,7 +32,6 @@ namespace NzbDrone.Core.IndexerSearch
         private readonly Logger _logger;

         public NzbSearchService(IIndexerFactory indexerFactory,
-                                IFetchFeedFromIndexers feedFetcher,
                                 ISceneMappingService sceneMapping,
                                 ISeriesService seriesService,
                                 IEpisodeService episodeService,
@@ -41,7 +39,6 @@ namespace NzbDrone.Core.IndexerSearch
                                 Logger logger)
         {
             _indexerFactory = indexerFactory;
-            _feedFetcher = feedFetcher;
             _sceneMapping = sceneMapping;
             _seriesService = seriesService;
             _episodeService = episodeService;
@@ -126,7 +123,7 @@ namespace NzbDrone.Core.IndexerSearch
                 else
                     searchSpec.EpisodeNumber = episode.SceneEpisodeNumber;

-                var decisions = Dispatch(indexer => _feedFetcher.Fetch(indexer, searchSpec), searchSpec);
+                var decisions = Dispatch(indexer => indexer.Fetch(searchSpec), searchSpec);
                 downloadDecisions.AddRange(decisions);
             }
             else
@@ -134,7 +131,7 @@ namespace NzbDrone.Core.IndexerSearch
                 var searchSpec = Get<SeasonSearchCriteria>(series, sceneSeasonEpisodes.ToList());
                 searchSpec.SeasonNumber = sceneSeasonEpisodes.Key;

-                var decisions = Dispatch(indexer => _feedFetcher.Fetch(indexer, searchSpec), searchSpec);
+                var decisions = Dispatch(indexer => indexer.Fetch(searchSpec), searchSpec);
                 downloadDecisions.AddRange(decisions);
             }
         }
@@ -144,7 +141,7 @@ namespace NzbDrone.Core.IndexerSearch
             var searchSpec = Get<SeasonSearchCriteria>(series, episodes);
             searchSpec.SeasonNumber = seasonNumber;

-            var decisions = Dispatch(indexer => _feedFetcher.Fetch(indexer, searchSpec), searchSpec);
+            var decisions = Dispatch(indexer => indexer.Fetch(searchSpec), searchSpec);
             downloadDecisions.AddRange(decisions);
         }

@@ -175,7 +172,7 @@ namespace NzbDrone.Core.IndexerSearch
                 searchSpec.SeasonNumber = episode.SeasonNumber;
             }

-            return Dispatch(indexer => _feedFetcher.Fetch(indexer, searchSpec), searchSpec);
+            return Dispatch(indexer => indexer.Fetch(searchSpec), searchSpec);
         }

         private List<DownloadDecision> SearchDaily(Series series, Episode episode)
@@ -184,7 +181,7 @@ namespace NzbDrone.Core.IndexerSearch
             var searchSpec = Get<DailyEpisodeSearchCriteria>(series, new List<Episode>{ episode });
             searchSpec.AirDate = airDate;

-            return Dispatch(indexer => _feedFetcher.Fetch(indexer, searchSpec), searchSpec);
+            return Dispatch(indexer => indexer.Fetch(searchSpec), searchSpec);
         }

         private List<DownloadDecision> SearchAnime(Series series, Episode episode)
@@ -202,7 +199,7 @@ namespace NzbDrone.Core.IndexerSearch
                 throw new ArgumentOutOfRangeException("AbsoluteEpisodeNumber", "Can not search for an episode absolute episode number of zero");
             }

-            return Dispatch(indexer => _feedFetcher.Fetch(indexer, searchSpec), searchSpec);
+            return Dispatch(indexer => indexer.Fetch(searchSpec), searchSpec);
         }

         private List<DownloadDecision> SearchSpecial(Series series, List<Episode> episodes)
@@ -213,7 +210,7 @@ namespace NzbDrone.Core.IndexerSearch
                 .SelectMany(e => searchSpec.QueryTitles.Select(title => title + " " + SearchCriteriaBase.GetQueryTitle(e.Title)))
                 .ToArray();

-            return Dispatch(indexer => _feedFetcher.Fetch(indexer, searchSpec), searchSpec);
+            return Dispatch(indexer => indexer.Fetch(searchSpec), searchSpec);
         }

         private List<DownloadDecision> SearchAnimeSeason(Series series, List<Episode> episodes)
@@ -3,101 +3,32 @@ using System.Collections.Generic;
 using System.Linq;
 using System.Text.RegularExpressions;
-using FluentValidation.Results;
+using NLog;
+using NzbDrone.Common.Http;
+using NzbDrone.Core.Configuration;
 using NzbDrone.Core.Parser;
 using NzbDrone.Core.ThingiProvider;

 namespace NzbDrone.Core.Indexers.Animezb
 {
-    public class Animezb : IndexerBase<NullConfig>
+    public class Animezb : HttpIndexerBase<NullConfig>
     {
-        private static readonly Regex RemoveCharactersRegex = new Regex(@"[!?`]", RegexOptions.Compiled);
-        private static readonly Regex RemoveSingleCharacterRegex = new Regex(@"\b[a-z0-9]\b", RegexOptions.Compiled | RegexOptions.IgnoreCase);
-        private static readonly Regex DuplicateCharacterRegex = new Regex(@"[ +]{2,}", RegexOptions.Compiled | RegexOptions.IgnoreCase);
+        public override DownloadProtocol Protocol { get { return DownloadProtocol.Usenet; } }

-        public override DownloadProtocol Protocol
+        public Animezb(IHttpClient httpClient, IConfigService configService, IParsingService parsingService, Logger logger)
+            : base(httpClient, configService, parsingService, logger)
         {
-            get
-            {
-                return DownloadProtocol.Usenet;
-            }
         }

-        public override bool SupportsSearch
-        {
-            get
-            {
-                return true;
-            }
-        }
-
-        public override IParseFeed Parser
-        {
-            get
-            {
-                return new AnimezbParser();
-            }
-        }
-
-        public override IEnumerable<string> RecentFeed
-        {
-            get
-            {
-                yield return "https://animezb.com/rss?cat=anime&max=100";
-            }
-        }
-
-        public override IEnumerable<string> GetEpisodeSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int episodeNumber)
-        {
-            return new List<string>();
-        }
-
-        public override IEnumerable<string> GetSeasonSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int offset)
-        {
-            return new List<string>();
-        }
-
-        public override IEnumerable<string> GetDailyEpisodeSearchUrls(List<String> titles, int tvRageId, DateTime date)
-        {
-            return new List<string>();
-        }
-
-        public override IEnumerable<string> GetAnimeEpisodeSearchUrls(List<String> titles, int tvRageId, int absoluteEpisodeNumber)
-        {
-            return titles.SelectMany(title =>
-                RecentFeed.Select(url =>
-                    String.Format("{0}&q={1}", url, GetSearchQuery(title, absoluteEpisodeNumber))));
-
-        }
-
-        public override IEnumerable<string> GetSearchUrls(string query, int offset)
+        public override IIndexerRequestGenerator GetRequestGenerator()
         {
-            return new List<string>();
+            return new AnimezbRequestGenerator();
         }

-        public override ValidationResult Test()
+        public override IParseIndexerResponse GetParser()
         {
-            return new ValidationResult();
-        }
-
-        private String GetSearchQuery(string title, int absoluteEpisodeNumber)
-        {
-            var match = RemoveSingleCharacterRegex.Match(title);
-
-            if (match.Success)
-            {
-                title = RemoveSingleCharacterRegex.Replace(title, "");
-
-                //Since we removed a character we need to not wrap it in quotes and hope animedb doesn't give us a million results
-                return CleanTitle(String.Format("{0}+{1:00}", title, absoluteEpisodeNumber));
-            }
-
-            //Wrap the query in quotes and search!
-            return CleanTitle(String.Format("\"{0}+{1:00}\"", title, absoluteEpisodeNumber));
-        }
-
-        private String CleanTitle(String title)
-        {
-            title = RemoveCharactersRegex.Replace(title, "");
-            return DuplicateCharacterRegex.Replace(title, "+");
+            return new RssParser() { UseEnclosureLength = true };
         }
     }
 }
@@ -1,31 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Xml.Linq;
-using System.Linq;
-
-namespace NzbDrone.Core.Indexers.Animezb
-{
-    public class AnimezbParser : RssParserBase
-    {
-        protected override string GetNzbInfoUrl(XElement item)
-        {
-            IEnumerable<XElement> matches = item.DescendantsAndSelf("link");
-            if (matches.Any())
-            {
-                return matches.First().Value;
-            }
-            return String.Empty;
-        }
-
-        protected override long GetSize(XElement item)
-        {
-            IEnumerable<XElement> matches = item.DescendantsAndSelf("enclosure");
-            if (matches.Any())
-            {
-                XElement enclosureElement = matches.First();
-                return Convert.ToInt64(enclosureElement.Attribute("length").Value);
-            }
-            return 0;
-        }
-    }
-}
@@ -0,0 +1,105 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Text.RegularExpressions;
+using NzbDrone.Common;
+using NzbDrone.Common.Http;
+using NzbDrone.Core.IndexerSearch.Definitions;
+
+namespace NzbDrone.Core.Indexers.Animezb
+{
+    public class AnimezbRequestGenerator : IIndexerRequestGenerator
+    {
+        private static readonly Regex RemoveCharactersRegex = new Regex(@"[!?`]", RegexOptions.Compiled);
+        private static readonly Regex RemoveSingleCharacterRegex = new Regex(@"\b[a-z0-9]\b", RegexOptions.Compiled | RegexOptions.IgnoreCase);
+        private static readonly Regex DuplicateCharacterRegex = new Regex(@"[ +]{2,}", RegexOptions.Compiled | RegexOptions.IgnoreCase);
+
+        public String BaseUrl { get; set; }
+        public Int32 PageSize { get; set; }
+
+        public AnimezbRequestGenerator()
+        {
+            BaseUrl = "https://animezb.com/rss?cat=anime";
+            PageSize = 100;
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetRecentRequests()
+        {
+            var pageableRequests = new List<IEnumerable<IndexerRequest>>();
+
+            pageableRequests.AddIfNotNull(GetPagedRequests(null));
+
+            return pageableRequests;
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SingleEpisodeSearchCriteria searchCriteria)
+        {
+            return new List<IEnumerable<IndexerRequest>>();
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SeasonSearchCriteria searchCriteria)
+        {
+            return new List<IEnumerable<IndexerRequest>>();
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(DailyEpisodeSearchCriteria searchCriteria)
+        {
+            return new List<IEnumerable<IndexerRequest>>();
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(AnimeEpisodeSearchCriteria searchCriteria)
+        {
+            var pageableRequests = new List<IEnumerable<IndexerRequest>>();
+
+            foreach (var queryTitle in searchCriteria.QueryTitles)
+            {
+                var searchQuery = GetSearchQuery(queryTitle, searchCriteria.AbsoluteEpisodeNumber);
+
+                pageableRequests.Add(GetPagedRequests(searchQuery));
+            }
+
+            return pageableRequests;
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SpecialEpisodeSearchCriteria searchCriteria)
+        {
+            return new List<IEnumerable<IndexerRequest>>();
+        }
+
+        private IEnumerable<IndexerRequest> GetPagedRequests(String query)
+        {
+            var url = new StringBuilder();
+            url.AppendFormat("{0}&max={1}", BaseUrl, PageSize);
+
+            if (query.IsNotNullOrWhiteSpace())
+            {
+                url.AppendFormat("&q={0}", query);
+            }
+
+            yield return new IndexerRequest(url.ToString(), HttpAccept.Rss);
+        }
+
+        private String GetSearchQuery(String title, Int32 absoluteEpisodeNumber)
+        {
+            var match = RemoveSingleCharacterRegex.Match(title);
+
+            if (match.Success)
+            {
+                title = RemoveSingleCharacterRegex.Replace(title, "");
+
+                //Since we removed a character we need to not wrap it in quotes and hope animedb doesn't give us a million results
+                return CleanTitle(String.Format("{0}+{1:00}", title, absoluteEpisodeNumber));
+            }
+
+            //Wrap the query in quotes and search!
+            return CleanTitle(String.Format("\"{0}+{1:00}\"", title, absoluteEpisodeNumber));
+        }
+
+        private String CleanTitle(String title)
+        {
+            title = RemoveCharactersRegex.Replace(title, "");
+            return DuplicateCharacterRegex.Replace(title, "+");
+        }
+    }
+}
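As a worked example (inferred from the generator code above, not stated in the commit), the URLs it produces would look like this:

// GetRecentRequests(): https://animezb.com/rss?cat=anime&max=100
// GetSearchRequests (anime criteria, hypothetical title "Hanayamata", episode 10):
//     https://animezb.com/rss?cat=anime&max=100&q="Hanayamata+10"
// (the query is quoted because the title contains no single-character words)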
@@ -1,47 +0,0 @@
-using System;
-using System.Xml.Linq;
-using NzbDrone.Core.Parser.Model;
-
-namespace NzbDrone.Core.Indexers
-{
-    public class BasicTorrentRssParser : RssParserBase
-    {
-        protected override ReleaseInfo CreateNewReleaseInfo()
-        {
-            return new TorrentInfo();
-        }
-
-        protected override ReleaseInfo PostProcessor(XElement item, ReleaseInfo currentResult)
-        {
-            var torrentInfo = (TorrentInfo)currentResult;
-
-            torrentInfo.MagnetUrl = MagnetUrl(item);
-            torrentInfo.InfoHash = InfoHash(item);
-
-            return torrentInfo;
-        }
-
-        protected override long GetSize(XElement item)
-        {
-            var elementLength = GetTorrentElement(item).Element("contentLength");
-            return Convert.ToInt64(elementLength.Value);
-        }
-
-        protected virtual string MagnetUrl(XElement item)
-        {
-            var elementLength = GetTorrentElement(item).Element("magnetURI");
-            return elementLength.Value;
-        }
-
-        protected virtual string InfoHash(XElement item)
-        {
-            var elementLength = GetTorrentElement(item).Element("infoHash");
-            return elementLength.Value;
-        }
-
-        private static XElement GetTorrentElement(XElement item)
-        {
-            return item.Element("torrent");
-        }
-    }
-}
@@ -0,0 +1,28 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using NzbDrone.Common.Exceptions;
+
+namespace NzbDrone.Core.Indexers.Exceptions
+{
+    public class IndexerException : NzbDroneException
+    {
+        private readonly IndexerResponse _indexerResponse;
+
+        public IndexerException(IndexerResponse response, string message, params object[] args)
+            : base(message, args)
+        {
+        }
+
+        public IndexerException(IndexerResponse response, string message)
+            : base(message)
+        {
+        }
+
+        public IndexerResponse Response
+        {
+            get { return _indexerResponse; }
+        }
+    }
+}
@@ -1,6 +1,6 @@
 using NzbDrone.Common.Exceptions;

-namespace NzbDrone.Core.Indexers.Newznab
+namespace NzbDrone.Core.Indexers.Exceptions
 {
     public class SizeParsingException : NzbDroneException
     {
@@ -3,88 +3,32 @@ using System.Collections.Generic;
 using System.Linq;
 using System.Text.RegularExpressions;
-using FluentValidation.Results;
+using NLog;
+using NzbDrone.Common.Http;
+using NzbDrone.Core.Configuration;
+using NzbDrone.Core.Parser;
 using NzbDrone.Core.ThingiProvider;

 namespace NzbDrone.Core.Indexers.Fanzub
 {
-    public class Fanzub : IndexerBase<NullConfig>
+    public class Fanzub : HttpIndexerBase<NullConfig>
     {
-        private static readonly Regex RemoveCharactersRegex = new Regex(@"[!?`]", RegexOptions.Compiled);
+        public override DownloadProtocol Protocol { get { return DownloadProtocol.Usenet; } }

-        public override DownloadProtocol Protocol
+        public Fanzub(IHttpClient httpClient, IConfigService configService, IParsingService parsingService, Logger logger)
+            : base(httpClient, configService, parsingService, logger)
         {
-            get
-            {
-                return DownloadProtocol.Usenet;
-            }
-
         }

-        public override bool SupportsSearch
+        public override IIndexerRequestGenerator GetRequestGenerator()
         {
-            get
-            {
-                return true;
-            }
+            return new FanzubRequestGenerator();
         }

-        public override IParseFeed Parser
+        public override IParseIndexerResponse GetParser()
         {
-            get
-            {
-                return new FanzubParser();
-            }
-        }
-
-        public override IEnumerable<string> RecentFeed
-        {
-            get
-            {
-                yield return "http://fanzub.com/rss/?cat=anime&max=100";
-            }
-        }
-
-        public override IEnumerable<string> GetEpisodeSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int episodeNumber)
-        {
-            return new List<string>();
-        }
-
-        public override IEnumerable<string> GetSeasonSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int offset)
-        {
-            return new List<string>();
-        }
-
-        public override IEnumerable<string> GetDailyEpisodeSearchUrls(List<String> titles, int tvRageId, DateTime date)
-        {
-            return new List<string>();
-        }
-
-        public override IEnumerable<string> GetAnimeEpisodeSearchUrls(List<String> titles, int tvRageId, int absoluteEpisodeNumber)
-        {
-            return RecentFeed.Select(url => String.Format("{0}&q={1}",
-                url,
-                String.Join("|", titles.SelectMany(title => GetTitleSearchStrings(title, absoluteEpisodeNumber)))));
-        }
-
-        public override IEnumerable<string> GetSearchUrls(string query, int offset)
-        {
-            return new List<string>();
-        }
-
-        public override ValidationResult Test()
-        {
-            return new ValidationResult();
-        }
-
-        private IEnumerable<String> GetTitleSearchStrings(string title, int absoluteEpisodeNumber)
-        {
-            var formats = new[] { "{0}%20{1:00}", "{0}%20-%20{1:00}" };
-
-            return formats.Select(s => "\"" + String.Format(s, CleanTitle(title), absoluteEpisodeNumber) + "\"" );
-        }
-
-        private String CleanTitle(String title)
-        {
-            return RemoveCharactersRegex.Replace(title, "");
+            return new RssParser() { UseEnclosureUrl = true, UseEnclosureLength = true };
         }
     }
 }
@@ -1,31 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Xml.Linq;
-using System.Linq;
-
-namespace NzbDrone.Core.Indexers.Fanzub
-{
-    public class FanzubParser : RssParserBase
-    {
-        protected override string GetNzbInfoUrl(XElement item)
-        {
-            IEnumerable<XElement> matches = item.DescendantsAndSelf("link");
-            if (matches.Any())
-            {
-                return matches.First().Value;
-            }
-            return String.Empty;
-        }
-
-        protected override long GetSize(XElement item)
-        {
-            IEnumerable<XElement> matches = item.DescendantsAndSelf("enclosure");
-            if (matches.Any())
-            {
-                XElement enclosureElement = matches.First();
-                return Convert.ToInt64(enclosureElement.Attribute("length").Value);
-            }
-            return 0;
-        }
-    }
-}
@@ -0,0 +1,90 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Text.RegularExpressions;
+using NzbDrone.Common;
+using NzbDrone.Common.Http;
+using NzbDrone.Core.IndexerSearch.Definitions;
+
+namespace NzbDrone.Core.Indexers.Fanzub
+{
+    public class FanzubRequestGenerator : IIndexerRequestGenerator
+    {
+        private static readonly Regex RemoveCharactersRegex = new Regex(@"[!?`]", RegexOptions.Compiled);
+
+        public String BaseUrl { get; set; }
+        public Int32 PageSize { get; set; }
+
+        public FanzubRequestGenerator()
+        {
+            BaseUrl = "http://fanzub.com/rss/?cat=anime";
+            PageSize = 100;
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetRecentRequests()
+        {
+            var pageableRequests = new List<IEnumerable<IndexerRequest>>();
+
+            pageableRequests.AddIfNotNull(GetPagedRequests(null));
+
+            return pageableRequests;
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SingleEpisodeSearchCriteria searchCriteria)
+        {
+            return new List<IEnumerable<IndexerRequest>>();
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SeasonSearchCriteria searchCriteria)
+        {
+            return new List<IEnumerable<IndexerRequest>>();
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(DailyEpisodeSearchCriteria searchCriteria)
+        {
+            return new List<IEnumerable<IndexerRequest>>();
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(AnimeEpisodeSearchCriteria searchCriteria)
+        {
+            var pageableRequests = new List<IEnumerable<IndexerRequest>>();
+
+            var searchTitles = searchCriteria.QueryTitles.SelectMany(v => GetTitleSearchStrings(v, searchCriteria.AbsoluteEpisodeNumber)).ToList();
+
+            pageableRequests.Add(GetPagedRequests(String.Join("|", searchTitles)));
+
+            return pageableRequests;
+        }
+
+        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SpecialEpisodeSearchCriteria searchCriteria)
+        {
+            return new List<IEnumerable<IndexerRequest>>();
+        }
+
+        private IEnumerable<IndexerRequest> GetPagedRequests(String query)
+        {
+            var url = new StringBuilder();
+            url.AppendFormat("{0}&max={1}", BaseUrl, PageSize);
+
+            if (query.IsNotNullOrWhiteSpace())
+            {
+                url.AppendFormat("&q={0}", query);
+            }
+
+            yield return new IndexerRequest(url.ToString(), HttpAccept.Rss);
+        }
+
+        private IEnumerable<String> GetTitleSearchStrings(string title, int absoluteEpisodeNumber)
+        {
+            var formats = new[] { "{0}%20{1:00}", "{0}%20-%20{1:00}" };
+
+            return formats.Select(s => "\"" + String.Format(s, CleanTitle(title), absoluteEpisodeNumber) + "\"");
+        }
+
+        private String CleanTitle(String title)
+        {
+            return RemoveCharactersRegex.Replace(title, "");
+        }
+    }
+}
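Similarly, a worked example for the Fanzub generator (derived from GetTitleSearchStrings above and consistent with the expectation in FanzubFixture earlier in this diff):

// For title "Hanayamata", absolute episode 10, both title formats are ORed together:
//     http://fanzub.com/rss/?cat=anime&max=100&q="Hanayamata%2010"|"Hanayamata%20-%2010"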
@@ -15,13 +15,11 @@ namespace NzbDrone.Core.Indexers
     public class FetchAndParseRssService : IFetchAndParseRss
     {
         private readonly IIndexerFactory _indexerFactory;
-        private readonly IFetchFeedFromIndexers _feedFetcher;
         private readonly Logger _logger;

-        public FetchAndParseRssService(IIndexerFactory indexerFactory, IFetchFeedFromIndexers feedFetcher, Logger logger)
+        public FetchAndParseRssService(IIndexerFactory indexerFactory, Logger logger)
         {
             _indexerFactory = indexerFactory;
-            _feedFetcher = feedFetcher;
             _logger = logger;
         }

@@ -48,7 +46,7 @@ namespace NzbDrone.Core.Indexers

                 var task = taskFactory.StartNew(() =>
                 {
-                    var indexerFeed = _feedFetcher.FetchRss(indexerLocal);
+                    var indexerFeed = indexerLocal.FetchRecent();

                     lock (result)
                     {
@ -0,0 +1,229 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
using FluentValidation.Results;
using NLog;
using NzbDrone.Common;
using NzbDrone.Common.Http;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.Indexers.Exceptions;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Parser;
using NzbDrone.Core.Parser.Model;
using NzbDrone.Core.ThingiProvider;

namespace NzbDrone.Core.Indexers
{
    public abstract class HttpIndexerBase<TSettings> : IndexerBase<TSettings>
        where TSettings : IProviderConfig, new()
    {
        private const Int32 MaxNumResultsPerQuery = 1000;

        private readonly IHttpClient _httpClient;

        public override bool SupportsRss { get { return true; } }
        public override bool SupportsSearch { get { return true; } }
        public bool SupportsPaging { get { return PageSize > 0; } }

        public virtual Int32 PageSize { get { return 0; } }

        public abstract IIndexerRequestGenerator GetRequestGenerator();
        public abstract IParseIndexerResponse GetParser();

        public HttpIndexerBase(IHttpClient httpClient, IConfigService configService, IParsingService parsingService, Logger logger)
            : base(configService, parsingService, logger)
        {
            _httpClient = httpClient;
        }

        public override IList<ReleaseInfo> FetchRecent()
        {
            if (!SupportsRss)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetRecentRequests());
        }

        public override IList<ReleaseInfo> Fetch(SingleEpisodeSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }

        public override IList<ReleaseInfo> Fetch(SeasonSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }

        public override IList<ReleaseInfo> Fetch(DailyEpisodeSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }

        public override IList<ReleaseInfo> Fetch(AnimeEpisodeSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }

        public override IList<ReleaseInfo> Fetch(SpecialEpisodeSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }

        protected virtual IList<ReleaseInfo> FetchReleases(IList<IEnumerable<IndexerRequest>> pageableRequests)
        {
            var releases = new List<ReleaseInfo>();
            var url = String.Empty;

            var parser = GetParser();

            try
            {
                foreach (var pageableRequest in pageableRequests)
                {
                    var pagedReleases = new List<ReleaseInfo>();

                    foreach (var request in pageableRequest)
                    {
                        url = request.Url.ToString();

                        var page = FetchPage(request, parser);

                        pagedReleases.AddRange(page);

                        if (!IsFullPage(page) || pagedReleases.Count >= MaxNumResultsPerQuery)
                        {
                            break;
                        }
                    }

                    releases.AddRange(pagedReleases);
                }
            }
            catch (WebException webException)
            {
                if (webException.Message.Contains("502") || webException.Message.Contains("503") ||
                    webException.Message.Contains("timed out"))
                {
                    _logger.Warn("{0} server is currently unavailable. {1} {2}", this, url, webException.Message);
                }
                else
                {
                    _logger.Warn("{0} {1} {2}", this, url, webException.Message);
                }
            }
            catch (RequestLimitReachedException)
            {
                // TODO: Backoff for x period.
                _logger.Warn("API Request Limit reached for {0}", this);
            }
            catch (ApiKeyException)
            {
                _logger.Warn("Invalid API Key for {0} {1}", this, url);
            }
            catch (Exception feedEx)
            {
                feedEx.Data.Add("FeedUrl", url);
                _logger.ErrorException("An error occurred while processing feed. " + url, feedEx);
            }

            return CleanupReleases(releases);
        }

        protected virtual Boolean IsFullPage(IList<ReleaseInfo> page)
        {
            return PageSize != 0 && page.Count >= PageSize;
        }

        protected virtual IList<ReleaseInfo> FetchPage(IndexerRequest request, IParseIndexerResponse parser)
        {
            var url = request.Url;

            _logger.Debug("Downloading Feed " + request.Url);
            var response = new IndexerResponse(request, _httpClient.Execute(request.HttpRequest));

            if (response.HttpResponse.Headers.ContentType != null && response.HttpResponse.Headers.ContentType.Contains("text/html") &&
                request.HttpRequest.Headers.Accept != null && !request.HttpRequest.Headers.Accept.Contains("text/html"))
            {
                throw new WebException("Indexer responded with html content. Site is likely blocked or unavailable.");
            }

            return parser.ParseResponse(response).ToList();
        }

        protected override void Test(List<ValidationFailure> failures)
        {
            failures.AddIfNotNull(TestConnection());
        }

        protected virtual ValidationFailure TestConnection()
        {
            // TODO: This doesn't actually work, because those exceptions get caught below.
            try
            {
                var releases = FetchRecent();

                if (releases.Any()) return null;
            }
            catch (ApiKeyException)
            {
                _logger.Warn("Indexer returned result for RSS URL, API Key appears to be invalid");

                return new ValidationFailure("ApiKey", "Invalid API Key");
            }
            catch (RequestLimitReachedException)
            {
                _logger.Warn("Request limit reached");
            }
            catch (Exception ex)
            {
                _logger.WarnException("Unable to connect to indexer: " + ex.Message, ex);

                return new ValidationFailure("Url", "Unable to connect to indexer, check the log for more details");
            }

            return null;
        }
    }

}
@ -1,23 +1,22 @@
using System;
using System.Collections.Generic;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Parser.Model;
using NzbDrone.Core.ThingiProvider;

namespace NzbDrone.Core.Indexers
{
    public interface IIndexer : IProvider
    {
        IParseFeed Parser { get; }
        DownloadProtocol Protocol { get; }
        Int32 SupportedPageSize { get; }
        Boolean SupportsPaging { get; }
        Boolean SupportsRss { get; }
        Boolean SupportsSearch { get; }
        DownloadProtocol Protocol { get; }

        IEnumerable<string> RecentFeed { get; }
        IEnumerable<string> GetEpisodeSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int episodeNumber);
        IEnumerable<string> GetDailyEpisodeSearchUrls(List<String> titles, int tvRageId, DateTime date);
        IEnumerable<string> GetAnimeEpisodeSearchUrls(List<String> titles, int tvRageId, int absoluteEpisodeNumber);
        IEnumerable<string> GetSeasonSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int offset);
        IEnumerable<string> GetSearchUrls(string query, int offset = 0);
        IList<ReleaseInfo> FetchRecent();
        IList<ReleaseInfo> Fetch(SeasonSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(SingleEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(DailyEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(AnimeEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(SpecialEpisodeSearchCriteria searchCriteria);
    }
}
@ -0,0 +1,18 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NzbDrone.Core.IndexerSearch.Definitions;

namespace NzbDrone.Core.Indexers
{
    public interface IIndexerRequestGenerator
    {
        IList<IEnumerable<IndexerRequest>> GetRecentRequests();
        IList<IEnumerable<IndexerRequest>> GetSearchRequests(SingleEpisodeSearchCriteria searchCriteria);
        IList<IEnumerable<IndexerRequest>> GetSearchRequests(SeasonSearchCriteria searchCriteria);
        IList<IEnumerable<IndexerRequest>> GetSearchRequests(DailyEpisodeSearchCriteria searchCriteria);
        IList<IEnumerable<IndexerRequest>> GetSearchRequests(AnimeEpisodeSearchCriteria searchCriteria);
        IList<IEnumerable<IndexerRequest>> GetSearchRequests(SpecialEpisodeSearchCriteria searchCriteria);
    }
}
@ -1,10 +0,0 @@
using System.Collections.Generic;
using NzbDrone.Core.Parser.Model;

namespace NzbDrone.Core.Indexers
{
    public interface IParseFeed
    {
        IEnumerable<ReleaseInfo> Process(string xml, string url);
    }
}
@ -0,0 +1,13 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NzbDrone.Core.Parser.Model;

namespace NzbDrone.Core.Indexers
{
    public interface IParseIndexerResponse
    {
        IList<ReleaseInfo> ParseResponse(IndexerResponse indexerResponse);
    }
}
@ -1,18 +1,40 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using FluentValidation.Results;
using NLog;
using NzbDrone.Common;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Parser;
using NzbDrone.Core.Parser.Model;
using NzbDrone.Core.ThingiProvider;

namespace NzbDrone.Core.Indexers
{
    public abstract class IndexerBase<TSettings> : IIndexer where TSettings : IProviderConfig, new()
    public abstract class IndexerBase<TSettings> : IIndexer
        where TSettings : IProviderConfig, new()
    {
        protected readonly IConfigService _configService;
        protected readonly IParsingService _parsingService;
        protected readonly Logger _logger;

        public abstract DownloadProtocol Protocol { get; }

        public abstract Boolean SupportsRss { get; }
        public abstract Boolean SupportsSearch { get; }

        public IndexerBase(IConfigService configService, IParsingService parsingService, Logger logger)
        {
            _configService = configService;
            _parsingService = parsingService;
            _logger = logger;
        }

        public Type ConfigContract
        {
            get
            {
                return typeof(TSettings);
            }
            get { return typeof(TSettings); }
        }

        public virtual IEnumerable<ProviderDefinition> DefaultDefinitions

@ -24,7 +46,7 @@ namespace NzbDrone.Core.Indexers
                yield return new IndexerDefinition
                {
                    Name = GetType().Name,
                    EnableRss = config.Validate().IsValid,
                    EnableRss = config.Validate().IsValid && SupportsRss,
                    EnableSearch = config.Validate().IsValid && SupportsSearch,
                    Implementation = GetType().Name,
                    Settings = config

@ -34,14 +56,6 @@ namespace NzbDrone.Core.Indexers

        public virtual ProviderDefinition Definition { get; set; }

        public abstract ValidationResult Test();
        public abstract DownloadProtocol Protocol { get; }

        public virtual Boolean SupportsRss { get { return true; } }
        public virtual Boolean SupportsSearch { get { return true; } }
        public virtual Int32 SupportedPageSize { get { return 0; } }
        public bool SupportsPaging { get { return SupportedPageSize > 0; } }

        protected TSettings Settings
        {
            get

@ -50,14 +64,44 @@ namespace NzbDrone.Core.Indexers
            }
        }

        public virtual IParseFeed Parser { get; private set; }
        public abstract IList<ReleaseInfo> FetchRecent();
        public abstract IList<ReleaseInfo> Fetch(SeasonSearchCriteria searchCriteria);
        public abstract IList<ReleaseInfo> Fetch(SingleEpisodeSearchCriteria searchCriteria);
        public abstract IList<ReleaseInfo> Fetch(DailyEpisodeSearchCriteria searchCriteria);
        public abstract IList<ReleaseInfo> Fetch(AnimeEpisodeSearchCriteria searchCriteria);
        public abstract IList<ReleaseInfo> Fetch(SpecialEpisodeSearchCriteria searchCriteria);

        public abstract IEnumerable<string> RecentFeed { get; }
        public abstract IEnumerable<string> GetEpisodeSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int episodeNumber);
        public abstract IEnumerable<string> GetDailyEpisodeSearchUrls(List<String> titles, int tvRageId, DateTime date);
        public abstract IEnumerable<string> GetAnimeEpisodeSearchUrls(List<String> titles, int tvRageId, int absoluteEpisodeNumber);
        public abstract IEnumerable<string> GetSeasonSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int offset);
        public abstract IEnumerable<string> GetSearchUrls(string query, int offset);
        protected virtual IList<ReleaseInfo> CleanupReleases(IEnumerable<ReleaseInfo> releases)
        {
            var result = releases.DistinctBy(v => v.Guid).ToList();

            result.ForEach(c =>
            {
                c.Indexer = Definition.Name;
                c.DownloadProtocol = Protocol;
            });

            return result;
        }

        public ValidationResult Test()
        {
            var failures = new List<ValidationFailure>();

            try
            {
                Test(failures);
            }
            catch (Exception ex)
            {
                _logger.ErrorException("Test aborted due to exception", ex);
                failures.Add(new ValidationFailure(string.Empty, "Test was aborted due to an error: " + ex.Message));
            }

            return new ValidationResult(failures);
        }

        protected abstract void Test(List<ValidationFailure> failures);

        public override string ToString()
        {
@ -1,197 +0,0 @@
using System;
using System.Collections.Generic;
using System.Net;
using NLog;
using NzbDrone.Common;
using NzbDrone.Common.Http;
using NzbDrone.Core.Indexers.Exceptions;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Parser.Model;
using System.Linq;

namespace NzbDrone.Core.Indexers
{
    public interface IFetchFeedFromIndexers
    {
        IList<ReleaseInfo> FetchRss(IIndexer indexer);
        IList<ReleaseInfo> Fetch(IIndexer indexer, SeasonSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(IIndexer indexer, SingleEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(IIndexer indexer, DailyEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(IIndexer indexer, AnimeEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(IIndexer indexer, SpecialEpisodeSearchCriteria searchCriteria);
    }

    public class FetchFeedService : IFetchFeedFromIndexers
    {
        private readonly Logger _logger;
        private readonly IHttpClient _httpClient;

        public FetchFeedService(IHttpClient httpClient, Logger logger)
        {
            _httpClient = httpClient;
            _logger = logger;
        }

        public virtual IList<ReleaseInfo> FetchRss(IIndexer indexer)
        {
            _logger.Debug("Fetching feeds from " + indexer);

            var result = Fetch(indexer, indexer.RecentFeed);

            _logger.Debug("Finished processing feeds from {0} found {1} releases", indexer, result.Count);

            return result;
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, SeasonSearchCriteria searchCriteria)
        {
            return Fetch(indexer, searchCriteria, 0).DistinctBy(c => c.DownloadUrl).ToList();
        }

        private IList<ReleaseInfo> Fetch(IIndexer indexer, SeasonSearchCriteria searchCriteria, int offset)
        {
            var searchUrls = indexer.GetSeasonSearchUrls(searchCriteria.QueryTitles, searchCriteria.Series.TvRageId, searchCriteria.SeasonNumber, offset).ToList();

            if (searchUrls.Any())
            {
                _logger.Debug("Searching for {0} offset: {1}", searchCriteria, offset);

                var result = Fetch(indexer, searchUrls);

                _logger.Info("{0} offset {1}. Found {2}", indexer, offset, result.Count);

                if (indexer.SupportsPaging && result.Count >= indexer.SupportedPageSize && offset < 900)
                {
                    result.AddRange(Fetch(indexer, searchCriteria, offset + indexer.SupportedPageSize));
                }

                // Only log finish for the first call to this recursive method
                if (offset == 0)
                {
                    _logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);
                }

                return result;
            }

            return new List<ReleaseInfo>();
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, SingleEpisodeSearchCriteria searchCriteria)
        {
            var searchUrls = indexer.GetEpisodeSearchUrls(searchCriteria.QueryTitles, searchCriteria.Series.TvRageId, searchCriteria.SeasonNumber, searchCriteria.EpisodeNumber).ToList();
            return Fetch(indexer, searchUrls, searchCriteria);
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, DailyEpisodeSearchCriteria searchCriteria)
        {
            var searchUrls = indexer.GetDailyEpisodeSearchUrls(searchCriteria.QueryTitles, searchCriteria.Series.TvRageId, searchCriteria.AirDate).ToList();
            return Fetch(indexer, searchUrls, searchCriteria);
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, AnimeEpisodeSearchCriteria searchCriteria)
        {
            var searchUrls = indexer.GetAnimeEpisodeSearchUrls(searchCriteria.SceneTitles, searchCriteria.Series.TvRageId, searchCriteria.AbsoluteEpisodeNumber).ToList();
            return Fetch(indexer, searchUrls, searchCriteria);
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, SpecialEpisodeSearchCriteria searchCriteria)
        {
            var searchUrls = new List<String>();

            foreach (var episodeQueryTitle in searchCriteria.EpisodeQueryTitles)
            {
                var urls = indexer.GetSearchUrls(episodeQueryTitle).ToList();

                if (urls.Any())
                {
                    _logger.Debug("Performing query of {0} for {1}", indexer, episodeQueryTitle);
                    searchUrls.AddRange(urls);
                }
            }

            return Fetch(indexer, searchUrls, searchCriteria);
        }

        private List<ReleaseInfo> Fetch(IIndexer indexer, IEnumerable<string> urls, SearchCriteriaBase searchCriteria)
        {
            var urlList = urls.ToList();

            if (urlList.Empty())
            {
                return new List<ReleaseInfo>();
            }

            _logger.Debug("Searching for {0}", searchCriteria);

            var result = Fetch(indexer, urlList);

            _logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);

            return result;
        }

        private List<ReleaseInfo> Fetch(IIndexer indexer, IEnumerable<string> urls)
        {
            var result = new List<ReleaseInfo>();

            foreach (var url in urls)
            {
                try
                {
                    _logger.Debug("Downloading Feed " + url);
                    var request = new HttpRequest(url);
                    request.Headers.Accept = "text/xml, text/rss+xml, application/rss+xml";
                    var response = _httpClient.Get(request);

                    if (response.Headers.ContentType != null && response.Headers.ContentType.Split(';')[0] == "text/html")
                    {
                        throw new WebException("Indexer responded with html content. Site is likely blocked or unavailable.");
                    }

                    var xml = response.Content;
                    if (!string.IsNullOrWhiteSpace(xml))
                    {
                        result.AddRange(indexer.Parser.Process(xml, url));
                    }
                    else
                    {
                        _logger.Warn("{0} returned empty response.", url);
                    }

                }
                catch (WebException webException)
                {
                    if (webException.Message.Contains("502") || webException.Message.Contains("503") ||
                        webException.Message.Contains("timed out"))
                    {
                        _logger.Warn("{0} server is currently unavailable. {1} {2}", indexer, url, webException.Message);
                    }
                    else
                    {
                        _logger.Warn("{0} {1} {2}", indexer, url, webException.Message);
                    }
                }
                catch (ApiKeyException)
                {
                    _logger.Warn("Invalid API Key for {0} {1}", indexer, url);
                }
                catch (Exception feedEx)
                {
                    feedEx.Data.Add("FeedUrl", url);
                    _logger.ErrorException("An error occurred while processing feed. " + url, feedEx);
                }
            }

            result = result.DistinctBy(v => v.Guid).ToList();

            result.ForEach(c =>
            {
                c.Indexer = indexer.Definition.Name;
                c.DownloadProtocol = indexer.Protocol;
            });

            return result;
        }
    }
}
@ -0,0 +1,60 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NzbDrone.Common.Http;

namespace NzbDrone.Core.Indexers
{
    public class IndexerRequest
    {
        public HttpRequest HttpRequest { get; private set; }

        public IndexerRequest(String url, HttpAccept httpAccept)
        {
            HttpRequest = new HttpRequest(url, httpAccept);
        }

        public IndexerRequest(HttpRequest httpRequest)
        {
            HttpRequest = httpRequest;
        }

        public Uri Url
        {
            get { return HttpRequest.Url; }
        }
    }

    public class IndexerResponse
    {
        private readonly IndexerRequest _indexerRequest;
        private readonly HttpResponse _httpResponse;

        public IndexerResponse(IndexerRequest indexerRequest, HttpResponse httpResponse)
        {
            _indexerRequest = indexerRequest;
            _httpResponse = httpResponse;
        }

        public IndexerRequest Request
        {
            get { return _indexerRequest; }
        }

        public HttpRequest HttpRequest
        {
            get { return _httpResponse.Request; }
        }

        public HttpResponse HttpResponse
        {
            get { return _httpResponse; }
        }

        public String Content
        {
            get { return _httpResponse.Content; }
        }
    }
}
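A hypothetical round trip through the wrapper types above (httpClient is assumed to be an NzbDrone.Common.Http.IHttpClient, as used by HttpIndexerBase; the URL is illustrative):

    var request = new IndexerRequest("http://indexer.example.com/rss", HttpAccept.Rss);
    var response = new IndexerResponse(request, httpClient.Execute(request.HttpRequest));

    Console.WriteLine(response.HttpResponse.StatusCode);
    Console.WriteLine(response.Content);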
@ -5,185 +5,49 @@ using FluentValidation.Results;
using NLog;
using NzbDrone.Common;
using NzbDrone.Common.Http;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.Indexers.Exceptions;
using NzbDrone.Core.Parser;
using NzbDrone.Core.ThingiProvider;

namespace NzbDrone.Core.Indexers.Newznab
{
    public class Newznab : IndexerBase<NewznabSettings>
    public class Newznab : HttpIndexerBase<NewznabSettings>
    {
        private readonly IFetchFeedFromIndexers _feedFetcher;
        private readonly HttpProvider _httpProvider;
        private readonly Logger _logger;

        public Newznab(IFetchFeedFromIndexers feedFetcher, HttpProvider httpProvider, Logger logger)
        {
            _feedFetcher = feedFetcher;
            _httpProvider = httpProvider;
            _logger = logger;
        }

        // protected so it can be mocked, but not used for DI
        // TODO: Is there a better way to achieve this?
        protected Newznab()
        {
        }

        public override DownloadProtocol Protocol { get { return DownloadProtocol.Usenet; } }
        public override Int32 SupportedPageSize { get { return 100; } }
        public override Int32 PageSize { get { return 100; } }

        public override IParseFeed Parser
        public override IIndexerRequestGenerator GetRequestGenerator()
        {
            get
            return new NewznabRequestGenerator()
            {
                return new NewznabParser();
            }
                PageSize = PageSize,
                Settings = Settings
            };
        }

        public override IParseIndexerResponse GetParser()
        {
            return new NewznabRssParser();
        }

        public override IEnumerable<ProviderDefinition> DefaultDefinitions
        {
            get
            {
                var list = new List<IndexerDefinition>();

                list.Add(GetDefinition("Nzbs.org", GetSettings("http://nzbs.org", 5000)));
                list.Add(GetDefinition("Nzb.su", GetSettings("https://api.nzb.su")));
                list.Add(GetDefinition("Dognzb.cr", GetSettings("https://api.dognzb.cr")));
                list.Add(GetDefinition("OZnzb.com", GetSettings("https://www.oznzb.com")));
                list.Add(GetDefinition("nzbplanet.net", GetSettings("https://nzbplanet.net")));
                list.Add(GetDefinition("NZBgeek", GetSettings("https://api.nzbgeek.info")));

                return list;
                yield return GetDefinition("Nzbs.org", GetSettings("http://nzbs.org", 5000));
                yield return GetDefinition("Nzb.su", GetSettings("https://api.nzb.su"));
                yield return GetDefinition("Dognzb.cr", GetSettings("https://api.dognzb.cr"));
                yield return GetDefinition("OZnzb.com", GetSettings("https://www.oznzb.com"));
                yield return GetDefinition("nzbplanet.net", GetSettings("https://nzbplanet.net"));
                yield return GetDefinition("NZBgeek", GetSettings("https://api.nzbgeek.info"));
            }
        }

        public override ProviderDefinition Definition { get; set; }

        public override IEnumerable<string> RecentFeed
        public Newznab(IHttpClient httpClient, IConfigService configService, IParsingService parsingService, Logger logger)
            : base(httpClient, configService, parsingService, logger)
        {
            get
            {
                var categories = String.Join(",", Settings.Categories.Concat(Settings.AnimeCategories));

                var url = String.Format("{0}/api?t=tvsearch&cat={1}&extended=1{2}", Settings.Url.TrimEnd('/'), categories, Settings.AdditionalParameters);

                if (!String.IsNullOrWhiteSpace(Settings.ApiKey))
                {
                    url += "&apikey=" + Settings.ApiKey;
                }

                yield return url;
            }
        }

        public override IEnumerable<string> GetEpisodeSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int episodeNumber)
        {
            if (Settings.Categories.Empty())
            {
                return Enumerable.Empty<String>();
            }

            if (tvRageId > 0)
            {
                return RecentFeed.Select(url => String.Format("{0}&limit=100&rid={1}&season={2}&ep={3}", url, tvRageId, seasonNumber, episodeNumber));
            }

            return titles.SelectMany(title =>
                RecentFeed.Select(url =>
                    String.Format("{0}&limit=100&q={1}&season={2}&ep={3}",
                        url, NewsnabifyTitle(title), seasonNumber, episodeNumber)));
        }

        public override IEnumerable<string> GetDailyEpisodeSearchUrls(List<String> titles, int tvRageId, DateTime date)
        {
            if (Settings.Categories.Empty())
            {
                return Enumerable.Empty<String>();
            }

            if (tvRageId > 0)
            {
                return RecentFeed.Select(url => String.Format("{0}&limit=100&rid={1}&season={2:yyyy}&ep={2:MM}/{2:dd}", url, tvRageId, date)).ToList();
            }

            return titles.SelectMany(title =>
                RecentFeed.Select(url =>
                    String.Format("{0}&limit=100&q={1}&season={2:yyyy}&ep={2:MM}/{2:dd}",
                        url, NewsnabifyTitle(title), date)).ToList());
        }

        public override IEnumerable<string> GetAnimeEpisodeSearchUrls(List<String> titles, int tvRageId, int absoluteEpisodeNumber)
        {
            if (Settings.AnimeCategories.Empty())
            {
                return Enumerable.Empty<String>();
            }

            return titles.SelectMany(title =>
                RecentFeed.Select(url =>
                    String.Format("{0}&limit=100&q={1}+{2:00}",
                        url.Replace("t=tvsearch", "t=search"), NewsnabifyTitle(title), absoluteEpisodeNumber)));
        }

        public override IEnumerable<string> GetSeasonSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int offset)
        {
            if (Settings.Categories.Empty())
            {
                return Enumerable.Empty<String>();
            }

            if (tvRageId > 0)
            {
                return RecentFeed.Select(url => String.Format("{0}&limit=100&rid={1}&season={2}&offset={3}", url, tvRageId, seasonNumber, offset));
            }

            return titles.SelectMany(title =>
                RecentFeed.Select(url =>
                    String.Format("{0}&limit=100&q={1}&season={2}&offset={3}",
                        url, NewsnabifyTitle(title), seasonNumber, offset)));
        }

        public override IEnumerable<string> GetSearchUrls(string query, int offset)
        {
            // encode query (replace the + with spaces first)
            query = query.Replace("+", " ");
            query = System.Web.HttpUtility.UrlEncode(query);
            return RecentFeed.Select(url => String.Format("{0}&offset={1}&limit=100&q={2}", url.Replace("t=tvsearch", "t=search"), offset, query));
        }

        public override ValidationResult Test()
        {
            var releases = _feedFetcher.FetchRss(this);

            if (releases.Any()) return new ValidationResult();

            try
            {
                var url = RecentFeed.First();
                var xml = _httpProvider.DownloadString(url);

                NewznabPreProcessor.Process(xml, url);
            }
            catch (ApiKeyException)
            {
                _logger.Warn("Indexer returned result for Newznab RSS URL, API Key appears to be invalid");

                var apiKeyFailure = new ValidationFailure("ApiKey", "Invalid API Key");
                return new ValidationResult(new List<ValidationFailure> { apiKeyFailure });
            }
            catch (RequestLimitReachedException)
            {
                _logger.Warn("Request limit reached");
            }
            catch (Exception ex)
            {
                _logger.WarnException("Unable to connect to indexer: " + ex.Message, ex);

                var failure = new ValidationFailure("Url", "Unable to connect to indexer, check the log for more details");
                return new ValidationResult(new List<ValidationFailure> { failure });
            }

            return new ValidationResult();
        }

        private IndexerDefinition GetDefinition(String name, NewznabSettings settings)

@ -212,10 +76,5 @@ namespace NzbDrone.Core.Indexers.Newznab

            return settings;
        }

        private static string NewsnabifyTitle(string title)
        {
            return title.Replace("+", "%20");
        }
    }
}
@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml.Linq;
using NzbDrone.Core.Parser.Model;
using System.Globalization;

namespace NzbDrone.Core.Indexers.Newznab
{
    public class NewznabParser : RssParserBase
    {
        private static readonly string[] IgnoredErrors =
        {
            "Request limit reached",
        };

        protected override string GetNzbInfoUrl(XElement item)
        {
            return item.Comments().Replace("#comments", "");
        }

        protected override DateTime GetPublishDate(XElement item)
        {
            var attributes = item.Elements("attr").ToList();
            var usenetdateElement = attributes.SingleOrDefault(e => e.Attribute("name").Value.Equals("usenetdate", StringComparison.CurrentCultureIgnoreCase));

            if (usenetdateElement != null)
            {
                var dateString = usenetdateElement.Attribute("value").Value;

                return XElementExtensions.ParseDate(dateString);
            }

            return base.GetPublishDate(item);
        }

        protected override long GetSize(XElement item)
        {
            var attributes = item.Elements("attr").ToList();
            var sizeElement = attributes.SingleOrDefault(e => e.Attribute("name").Value.Equals("size", StringComparison.CurrentCultureIgnoreCase));

            if (sizeElement != null)
            {
                return Convert.ToInt64(sizeElement.Attribute("value").Value);
            }

            return ParseSize(item.Description(), true);
        }

        public override IEnumerable<ReleaseInfo> Process(string xml, string url)
        {
            try
            {
                return base.Process(xml, url);
            }
            catch (NewznabException e)
            {
                if (!IgnoredErrors.Any(ignoredError => e.Message.Contains(ignoredError)))
                {
                    throw;
                }
                _logger.Error(e.Message);
                return new List<ReleaseInfo>();
            }
        }

        protected override ReleaseInfo PostProcessor(XElement item, ReleaseInfo currentResult)
        {
            if (currentResult != null)
            {
                var attributes = item.Elements("attr").ToList();

                var rageIdElement = attributes.SingleOrDefault(e => e.Attribute("name").Value.Equals("rageid", StringComparison.CurrentCultureIgnoreCase));

                if (rageIdElement != null)
                {
                    int tvRageId;

                    if (Int32.TryParse(rageIdElement.Attribute("value").Value, out tvRageId))
                    {
                        currentResult.TvRageId = tvRageId;
                    }
                }
            }

            return currentResult;
        }

        protected override void PreProcess(string source, string url)
        {
            NewznabPreProcessor.Process(source, url);
        }
    }
}
@ -1,35 +0,0 @@
using System;
using System.Linq;
using System.Xml.Linq;
using NzbDrone.Core.Indexers.Exceptions;

namespace NzbDrone.Core.Indexers.Newznab
{
    public static class NewznabPreProcessor
    {
        public static void Process(string source, string url)
        {
            var xdoc = XDocument.Parse(source);
            var error = xdoc.Descendants("error").FirstOrDefault();

            if (error == null) return;

            var code = Convert.ToInt32(error.Attribute("code").Value);
            var errorMessage = error.Attribute("description").Value;

            if (code >= 100 && code <= 199) throw new ApiKeyException("Invalid API key");

            if (!url.Contains("apikey=") && errorMessage == "Missing parameter")
            {
                throw new ApiKeyException("Indexer requires an API key");
            }

            if (errorMessage == "Request limit reached")
            {
                throw new RequestLimitReachedException("API limit reached");
            }

            throw new NewznabException("Newznab error detected: {0}", errorMessage);
        }
    }
}
@ -0,0 +1,176 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NzbDrone.Common;
using NzbDrone.Common.Http;
using NzbDrone.Core.IndexerSearch.Definitions;

namespace NzbDrone.Core.Indexers.Newznab
{
    public class NewznabRequestGenerator : IIndexerRequestGenerator
    {
        public Int32 MaxPages { get; set; }
        public Int32 PageSize { get; set; }
        public NewznabSettings Settings { get; set; }

        public NewznabRequestGenerator()
        {
            MaxPages = 30;
            PageSize = 100;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetRecentRequests()
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            // TODO: We might consider fetching multiple pages in the future, but for now we limit it to one page.
            pageableRequests.AddIfNotNull(GetPagedRequests(1, Settings.Categories.Concat(Settings.AnimeCategories), "tvsearch", ""));

            return pageableRequests;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SingleEpisodeSearchCriteria searchCriteria)
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            if (searchCriteria.Series.TvRageId > 0)
            {
                pageableRequests.AddIfNotNull(GetPagedRequests(MaxPages, Settings.Categories, "tvsearch",
                    String.Format("&rid={0}&season={1}&ep={2}",
                        searchCriteria.Series.TvRageId,
                        searchCriteria.SeasonNumber,
                        searchCriteria.EpisodeNumber)));
            }
            else
            {
                foreach (var queryTitle in searchCriteria.QueryTitles)
                {
                    pageableRequests.AddIfNotNull(GetPagedRequests(MaxPages, Settings.Categories, "tvsearch",
                        String.Format("&q={0}&season={1}&ep={2}",
                            NewsnabifyTitle(queryTitle),
                            searchCriteria.SeasonNumber,
                            searchCriteria.EpisodeNumber)));
                }
            }

            return pageableRequests;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SeasonSearchCriteria searchCriteria)
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            if (searchCriteria.Series.TvRageId > 0)
            {
                pageableRequests.AddIfNotNull(GetPagedRequests(MaxPages, Settings.Categories, "tvsearch",
                    String.Format("&rid={0}&season={1}",
                        searchCriteria.Series.TvRageId,
                        searchCriteria.SeasonNumber)));
            }
            else
            {
                foreach (var queryTitle in searchCriteria.QueryTitles)
                {
                    pageableRequests.AddIfNotNull(GetPagedRequests(MaxPages, Settings.Categories, "tvsearch",
                        String.Format("&q={0}&season={1}",
                            NewsnabifyTitle(queryTitle),
                            searchCriteria.SeasonNumber)));
                }
            }

            return pageableRequests;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(DailyEpisodeSearchCriteria searchCriteria)
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            if (searchCriteria.Series.TvRageId > 0)
            {
                pageableRequests.AddIfNotNull(GetPagedRequests(MaxPages, Settings.Categories, "tvsearch",
                    String.Format("&rid={0}&season={1:yyyy}&ep={1:MM}/{1:dd}",
                        searchCriteria.Series.TvRageId,
                        searchCriteria.AirDate)));
            }
            else
            {
                foreach (var queryTitle in searchCriteria.QueryTitles)
                {
                    pageableRequests.AddIfNotNull(GetPagedRequests(MaxPages, Settings.Categories, "tvsearch",
                        String.Format("&q={0}&season={1:yyyy}&ep={1:MM}/{1:dd}",
                            NewsnabifyTitle(queryTitle),
                            searchCriteria.AirDate)));
                }
            }

            return pageableRequests;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(AnimeEpisodeSearchCriteria searchCriteria)
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            foreach (var queryTitle in searchCriteria.QueryTitles)
            {
                pageableRequests.AddIfNotNull(GetPagedRequests(MaxPages, Settings.AnimeCategories, "search",
                    String.Format("&q={0}+{1:00}",
                        NewsnabifyTitle(queryTitle),
                        searchCriteria.AbsoluteEpisodeNumber)));
            }

            return pageableRequests;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SpecialEpisodeSearchCriteria searchCriteria)
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            foreach (var queryTitle in searchCriteria.EpisodeQueryTitles)
            {
                var query = queryTitle.Replace('+', ' ');
                query = System.Web.HttpUtility.UrlEncode(query);

                pageableRequests.AddIfNotNull(GetPagedRequests(MaxPages, Settings.Categories.Concat(Settings.AnimeCategories), "search",
                    String.Format("&q={0}",
                        query)));
            }

            return pageableRequests;
        }

        private IEnumerable<IndexerRequest> GetPagedRequests(Int32 maxPages, IEnumerable<Int32> categories, String searchType, String parameters)
        {
            if (categories.Empty())
            {
                yield break;
            }

            var categoriesQuery = String.Join(",", categories.Distinct());

            var baseUrl = String.Format("{0}/api?t={1}&cat={2}&extended=1{3}", Settings.Url.TrimEnd('/'), searchType, categoriesQuery, Settings.AdditionalParameters);

            if (Settings.ApiKey.IsNotNullOrWhiteSpace())
            {
                baseUrl += "&apikey=" + Settings.ApiKey;
            }

            if (PageSize == 0)
            {
                yield return new IndexerRequest(String.Format("{0}{1}", baseUrl, parameters), HttpAccept.Rss);
            }
            else
            {
                for (var page = 0; page < maxPages; page++)
                {
                    yield return new IndexerRequest(String.Format("{0}&offset={1}&limit={2}{3}", baseUrl, page * PageSize, PageSize, parameters), HttpAccept.Rss);
                }
            }
        }

        private static String NewsnabifyTitle(String title)
        {
            return title.Replace("+", "%20");
        }
    }
}
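The offset/limit expansion in GetPagedRequests above is easiest to see with concrete numbers; a standalone sketch (the URL and values are illustrative, not from the commit):

    var baseUrl = "https://api.example.com/api?t=tvsearch&cat=5030,5040&extended=1";
    const int pageSize = 100;
    const int maxPages = 3;

    for (var page = 0; page < maxPages; page++)
    {
        // Emits offset=0, 100, 200 with limit=100, mirroring page * PageSize above.
        Console.WriteLine("{0}&offset={1}&limit={2}", baseUrl, page * pageSize, pageSize);
    }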
@ -0,0 +1,112 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using NzbDrone.Common;
using NzbDrone.Core.Indexers.Exceptions;
using NzbDrone.Core.Parser.Model;

namespace NzbDrone.Core.Indexers.Newznab
{
    public class NewznabRssParser : RssParser
    {
        public const String ns = "{http://www.newznab.com/DTD/2010/feeds/attributes/}";

        protected override bool PreProcess(IndexerResponse indexerResponse)
        {
            var xdoc = XDocument.Parse(indexerResponse.Content);
            var error = xdoc.Descendants("error").FirstOrDefault();

            if (error == null) return true;

            var code = Convert.ToInt32(error.Attribute("code").Value);
            var errorMessage = error.Attribute("description").Value;

            if (code >= 100 && code <= 199) throw new ApiKeyException("Invalid API key");

            if (!indexerResponse.Request.Url.ToString().Contains("apikey=") && errorMessage == "Missing parameter")
            {
                throw new ApiKeyException("Indexer requires an API key");
            }

            if (errorMessage == "Request limit reached")
            {
                throw new RequestLimitReachedException("API limit reached");
            }

            throw new NewznabException("Newznab error detected: {0}", errorMessage);
        }

        protected override ReleaseInfo ProcessItem(XElement item, ReleaseInfo releaseInfo)
        {
            releaseInfo = base.ProcessItem(item, releaseInfo);

            releaseInfo.TvRageId = GetTvRageId(item);

            return releaseInfo;
        }

        protected override String GetInfoUrl(XElement item)
        {
            return item.TryGetValue("comments").TrimEnd("#comments");
        }

        protected override String GetCommentUrl(XElement item)
        {
            return item.TryGetValue("comments");
        }

        protected override Int64 GetSize(XElement item)
        {
            Int64 size;

            var sizeString = TryGetNewznabAttribute(item, "size");
            if (!sizeString.IsNullOrWhiteSpace() && Int64.TryParse(sizeString, out size))
            {
                return size;
            }

            size = GetEnclosureLength(item);

            return size;
        }

        protected override DateTime GetPublishDate(XElement item)
        {
            var dateString = TryGetNewznabAttribute(item, "usenetdate");
            if (!dateString.IsNullOrWhiteSpace())
            {
                return XElementExtensions.ParseDate(dateString);
            }

            return base.GetPublishDate(item);
        }

        protected virtual Int32 GetTvRageId(XElement item)
        {
            var tvRageIdString = TryGetNewznabAttribute(item, "rageid");
            Int32 tvRageId;

            if (!tvRageIdString.IsNullOrWhiteSpace() && Int32.TryParse(tvRageIdString, out tvRageId))
            {
                return tvRageId;
            }

            return 0;
        }

        protected String TryGetNewznabAttribute(XElement item, String key, String defaultValue = "")
        {
            var attr = item.Elements(ns + "attr").SingleOrDefault(e => e.Attribute("name").Value.Equals(key, StringComparison.CurrentCultureIgnoreCase));

            if (attr != null)
            {
                return attr.Attribute("value").Value;
            }

            return defaultValue;
        }
    }
}
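TryGetNewznabAttribute above depends on the namespaced attr elements in a newznab feed; a self-contained sketch of that lookup (the XML snippet is illustrative):

    using System;
    using System.Linq;
    using System.Xml.Linq;

    XNamespace ns = "http://www.newznab.com/DTD/2010/feeds/attributes/";

    var item = XElement.Parse(
        "<item xmlns:newznab=\"http://www.newznab.com/DTD/2010/feeds/attributes/\">" +
        "<newznab:attr name=\"size\" value=\"1234\" /></item>");

    var attr = item.Elements(ns + "attr")
                   .SingleOrDefault(e => (string)e.Attribute("name") == "size");

    Console.WriteLine((string)attr.Attribute("value")); // 1234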
@ -1,65 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using FluentValidation;
using FluentValidation.Results;
using NLog;
using NzbDrone.Common;
using NzbDrone.Common.Http;
using NzbDrone.Core.Indexers.Exceptions;
using NzbDrone.Core.Indexers.Newznab;

namespace NzbDrone.Core.Indexers
{
    public interface INewznabTestService
    {
        void Test(IIndexer indexer);
    }

    public class NewznabTestService : INewznabTestService
    {
        private readonly IFetchFeedFromIndexers _feedFetcher;
        private readonly IHttpProvider _httpProvider;
        private readonly Logger _logger;

        public NewznabTestService(IFetchFeedFromIndexers feedFetcher, IHttpProvider httpProvider, Logger logger)
        {
            _feedFetcher = feedFetcher;
            _httpProvider = httpProvider;
            _logger = logger;
        }

        public void Test(IIndexer indexer)
        {
            var releases = _feedFetcher.FetchRss(indexer);

            if (releases.Any()) return;

            try
            {
                var url = indexer.RecentFeed.First();
                var xml = _httpProvider.DownloadString(url);

                NewznabPreProcessor.Process(xml, url);
            }
            catch (ApiKeyException)
            {
                _logger.Warn("Indexer returned result for Newznab RSS URL, API Key appears to be invalid");

                var apiKeyFailure = new ValidationFailure("ApiKey", "Invalid API Key");
                throw new ValidationException(new List<ValidationFailure> { apiKeyFailure }.ToArray());
            }
            catch (RequestLimitReachedException)
            {
                _logger.Warn("Request limit reached");
            }
            catch (Exception ex)
            {
                _logger.WarnException("Indexer doesn't appear to be Newznab based: " + ex.Message, ex);

                var failure = new ValidationFailure("Url", "Invalid Newznab URL, check log for details");
                throw new ValidationException(new List<ValidationFailure> { failure }.ToArray());
            }
        }
    }
}
@ -2,89 +2,31 @@
using System.Collections.Generic;
using System.Linq;
using FluentValidation.Results;
using NLog;
using NzbDrone.Common.Http;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.Parser;

namespace NzbDrone.Core.Indexers.Omgwtfnzbs
{
    public class Omgwtfnzbs : IndexerBase<OmgwtfnzbsSettings>
    public class Omgwtfnzbs : HttpIndexerBase<OmgwtfnzbsSettings>
    {
        public override DownloadProtocol Protocol { get { return DownloadProtocol.Usenet; } }

        public override IParseFeed Parser
        public Omgwtfnzbs(IHttpClient httpClient, IConfigService configService, IParsingService parsingService, Logger logger)
            : base(httpClient, configService, parsingService, logger)
        {
            get
            {
                return new OmgwtfnzbsParser();
            }

        }

        public override IEnumerable<string> RecentFeed
        public override IIndexerRequestGenerator GetRequestGenerator()
        {
            get
            {
                yield return String.Format("http://rss.omgwtfnzbs.org/rss-search.php?catid=19,20&user={0}&api={1}&eng=1",
                    Settings.Username, Settings.ApiKey);
            }
            return new OmgwtfnzbsRequestGenerator() { Settings = Settings };
        }

        public override IEnumerable<string> GetEpisodeSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int episodeNumber)
        public override IParseIndexerResponse GetParser()
        {
            var searchUrls = new List<string>();

            foreach (var url in RecentFeed)
            {
                foreach (var title in titles)
                {
                    searchUrls.Add(String.Format("{0}&search={1}+S{2:00}E{3:00}", url, title, seasonNumber, episodeNumber));
                }
            }

            return searchUrls;
        }

        public override IEnumerable<string> GetDailyEpisodeSearchUrls(List<String> titles, int tvRageId, DateTime date)
        {
            var searchUrls = new List<String>();

            foreach (var url in RecentFeed)
            {
                foreach (var title in titles)
                {
                    searchUrls.Add(String.Format("{0}&search={1}+{2:yyyy MM dd}", url, title, date));
                }
            }

            return searchUrls;
        }

        public override IEnumerable<string> GetAnimeEpisodeSearchUrls(List<String> titles, int tvRageId, int absoluteEpisodeNumber)
        {
            // TODO: Implement
            return new List<string>();
        }

        public override IEnumerable<string> GetSeasonSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int offset)
        {
            var searchUrls = new List<String>();

            foreach (var url in RecentFeed)
            {
                foreach (var title in titles)
                {
                    searchUrls.Add(String.Format("{0}&search={1}+S{2:00}", url, title, seasonNumber));
                }
            }

            return searchUrls;
        }

        public override IEnumerable<string> GetSearchUrls(string query, int offset)
        {
            return new List<string>();
        }

        public override ValidationResult Test()
        {
            return new ValidationResult();
            return new OmgwtfnzbsRssParser();
        }
    }
}
@ -0,0 +1,106 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NzbDrone.Common;
using NzbDrone.Common.Http;
using NzbDrone.Core.IndexerSearch.Definitions;

namespace NzbDrone.Core.Indexers.Omgwtfnzbs
{
    public class OmgwtfnzbsRequestGenerator : IIndexerRequestGenerator
    {
        public String BaseUrl { get; set; }
        public OmgwtfnzbsSettings Settings { get; set; }

        public OmgwtfnzbsRequestGenerator()
        {
            BaseUrl = "http://rss.omgwtfnzbs.org/rss-search.php";
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetRecentRequests()
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            pageableRequests.AddIfNotNull(GetPagedRequests(null));

            return pageableRequests;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SingleEpisodeSearchCriteria searchCriteria)
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            foreach (var queryTitle in searchCriteria.QueryTitles)
            {
                pageableRequests.AddIfNotNull(GetPagedRequests(String.Format("{0}+S{1:00}E{2:00}",
                    queryTitle,
                    searchCriteria.SeasonNumber,
                    searchCriteria.EpisodeNumber)));
            }

            return pageableRequests;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SeasonSearchCriteria searchCriteria)
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            foreach (var queryTitle in searchCriteria.QueryTitles)
            {
                pageableRequests.AddIfNotNull(GetPagedRequests(String.Format("{0}+S{1:00}",
                    queryTitle,
                    searchCriteria.SeasonNumber)));
            }

            return pageableRequests;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(DailyEpisodeSearchCriteria searchCriteria)
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            foreach (var queryTitle in searchCriteria.QueryTitles)
            {
                pageableRequests.AddIfNotNull(GetPagedRequests(String.Format("{0}+{1:yyyy MM dd}",
                    queryTitle,
                    searchCriteria.AirDate)));
            }

            return pageableRequests;
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(AnimeEpisodeSearchCriteria searchCriteria)
        {
            return new List<IEnumerable<IndexerRequest>>();
        }

        public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SpecialEpisodeSearchCriteria searchCriteria)
        {
            var pageableRequests = new List<IEnumerable<IndexerRequest>>();

            foreach (var queryTitle in searchCriteria.EpisodeQueryTitles)
            {
                var query = queryTitle.Replace('+', ' ');
                query = System.Web.HttpUtility.UrlEncode(query);

                pageableRequests.AddIfNotNull(GetPagedRequests(query));
            }

            return pageableRequests;
        }

        private IEnumerable<IndexerRequest> GetPagedRequests(String query)
        {
            var url = new StringBuilder();
            url.AppendFormat("{0}?catid=19,20&user={1}&api={2}&eng=1", BaseUrl, Settings.Username, Settings.ApiKey);

            if (query.IsNotNullOrWhiteSpace())
            {
                url.AppendFormat("&search={0}", query);
            }

            yield return new IndexerRequest(url.ToString(), HttpAccept.Rss);
        }
    }
}
@ -4,9 +4,15 @@ using System.Xml.Linq;

namespace NzbDrone.Core.Indexers.Omgwtfnzbs
{
    public class OmgwtfnzbsParser : RssParserBase
    public class OmgwtfnzbsRssParser : RssParser
    {
        protected override string GetNzbInfoUrl(XElement item)
        public OmgwtfnzbsRssParser()
        {
            UseEnclosureUrl = true;
            UseEnclosureLength = true;
        }

        protected override string GetInfoUrl(XElement item)
        {
            // TODO: May need to parse the details page to get this...
            var match = Regex.Match(item.Description(), @"(?:\<b\>View NZB\:\<\/b\>\s\<a\shref\=\"")(?<URL>.+)(?:\""\starget)",

@ -19,11 +25,5 @@ namespace NzbDrone.Core.Indexers.Omgwtfnzbs

            return String.Empty;
        }

        protected override long GetSize(XElement item)
        {
            var sizeString = Regex.Match(item.Description(), @"(?:Size:\<\/b\>\s\d+\.)\d{1,2}\s\w{2}(?:\<br \/\>)", RegexOptions.IgnoreCase | RegexOptions.Compiled).Value;
            return ParseSize(sizeString, true);
        }
    }
}
@ -0,0 +1,54 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using NzbDrone.Common.Http;
|
||||
using NzbDrone.Core.IndexerSearch.Definitions;
|
||||
|
||||
namespace NzbDrone.Core.Indexers
|
||||
{
|
||||
public class RssIndexerRequestGenerator : IIndexerRequestGenerator
|
||||
{
|
||||
private readonly String _baseUrl;
|
||||
|
||||
public RssIndexerRequestGenerator(String baseUrl)
|
||||
{
|
||||
_baseUrl = baseUrl;
|
||||
}
|
||||
|
||||
|
||||
public virtual IList<IEnumerable<IndexerRequest>> GetRecentRequests()
|
||||
{
|
||||
var pageableRequests = new List<IEnumerable<IndexerRequest>>();
|
||||
|
||||
pageableRequests.Add(new[] { new IndexerRequest(_baseUrl, HttpAccept.Rss) });
|
||||
|
||||
return pageableRequests;
|
||||
}
|
||||
|
||||
public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SingleEpisodeSearchCriteria searchCriteria)
|
||||
{
|
||||
return new List<IEnumerable<IndexerRequest>>();
|
||||
}
|
||||
|
||||
public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SeasonSearchCriteria searchCriteria)
|
||||
{
|
||||
return new List<IEnumerable<IndexerRequest>>();
|
||||
}
|
||||
|
||||
public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(DailyEpisodeSearchCriteria searchCriteria)
|
||||
{
|
||||
return new List<IEnumerable<IndexerRequest>>();
|
||||
}
|
||||
|
||||
public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(AnimeEpisodeSearchCriteria searchCriteria)
|
||||
{
|
||||
return new List<IEnumerable<IndexerRequest>>();
|
||||
}
|
||||
|
||||
public virtual IList<IEnumerable<IndexerRequest>> GetSearchRequests(SpecialEpisodeSearchCriteria searchCriteria)
|
||||
{
|
||||
return new List<IEnumerable<IndexerRequest>>();
|
||||
}
|
||||
}
|
||||
}
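
A hypothetical consumption sketch (not part of this commit) shows why the return type is a list of sequences: each inner IEnumerable is one lazily-evaluated series of paged requests, so a fetcher can stop paging one tier early without touching the others.

using System;

namespace NzbDrone.Core.Indexers.Example
{
    // Hypothetical consumer of an IIndexerRequestGenerator, for illustration only.
    public static class RequestWalker
    {
        public static void Walk(IIndexerRequestGenerator generator)
        {
            foreach (var pageableRequests in generator.GetRecentRequests())
            {
                foreach (var request in pageableRequests)
                {
                    // IndexerRequest carries the target Url plus the Accept header to send.
                    Console.WriteLine(request.Url);
                    break; // illustrative early stop after the first page of this tier
                }
            }
        }
    }
}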

@@ -0,0 +1,235 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Xml;
using System.Xml.Linq;
using NLog;
using NzbDrone.Common;
using NzbDrone.Common.Instrumentation;
using NzbDrone.Core.Indexers.Exceptions;
using NzbDrone.Core.Parser.Model;

namespace NzbDrone.Core.Indexers
{
    public class RssParser : IParseIndexerResponse
    {
        protected readonly Logger _logger;

        // Use the 'guid' element content as InfoUrl.
        public Boolean UseGuidInfoUrl { get; set; }

        // Use the enclosure as download url and/or length.
        public Boolean UseEnclosureUrl { get; set; }
        public Boolean UseEnclosureLength { get; set; }

        // Parse "Size: 1.3 GB" or "1.3 GB" parts in the description element and use that as Size.
        public Boolean ParseSizeInDescription { get; set; }

        public RssParser()
        {
            _logger = NzbDroneLogger.GetLogger(this);
        }

        public virtual IList<ReleaseInfo> ParseResponse(IndexerResponse indexerResponse)
        {
            var releases = new List<ReleaseInfo>();

            if (!PreProcess(indexerResponse))
            {
                return releases;
            }

            using (var xmlTextReader = XmlReader.Create(new StringReader(indexerResponse.Content), new XmlReaderSettings { DtdProcessing = DtdProcessing.Ignore, IgnoreComments = true }))
            {
                var document = XDocument.Load(xmlTextReader);
                var items = document.Root.Element("channel").Elements("item");

                foreach (var item in items)
                {
                    try
                    {
                        var reportInfo = ProcessItem(item);

                        releases.AddIfNotNull(reportInfo);
                    }
                    catch (Exception itemEx)
                    {
                        itemEx.Data.Add("Item", item.Title());
                        _logger.ErrorException("An error occurred while processing feed item from " + indexerResponse.Request.Url, itemEx);
                    }
                }
            }

            return releases;
        }

        protected virtual ReleaseInfo CreateNewReleaseInfo()
        {
            return new ReleaseInfo();
        }

        protected virtual Boolean PreProcess(IndexerResponse indexerResponse)
        {
            if (indexerResponse.HttpResponse.StatusCode != System.Net.HttpStatusCode.OK)
            {
                throw new IndexerException(indexerResponse, "Indexer API call resulted in an unexpected StatusCode [{0}]", indexerResponse.HttpResponse.StatusCode);
            }

            return true;
        }

        protected ReleaseInfo ProcessItem(XElement item)
        {
            var releaseInfo = CreateNewReleaseInfo();

            releaseInfo = ProcessItem(item, releaseInfo);

            _logger.Trace("Parsed: {0}", releaseInfo.Title);

            return PostProcess(item, releaseInfo);
        }

        protected virtual ReleaseInfo ProcessItem(XElement item, ReleaseInfo releaseInfo)
        {
            releaseInfo.Guid = GetGuid(item);
            releaseInfo.Title = GetTitle(item);
            releaseInfo.PublishDate = GetPublishDate(item);
            releaseInfo.DownloadUrl = GetDownloadUrl(item);
            releaseInfo.InfoUrl = GetInfoUrl(item);
            releaseInfo.CommentUrl = GetCommentUrl(item);

            try
            {
                releaseInfo.Size = GetSize(item);
            }
            catch (Exception)
            {
                throw new SizeParsingException("Unable to parse size from: {0}", releaseInfo.Title);
            }

            return releaseInfo;
        }

        protected virtual ReleaseInfo PostProcess(XElement item, ReleaseInfo releaseInfo)
        {
            return releaseInfo;
        }

        protected virtual String GetGuid(XElement item)
        {
            return item.TryGetValue("guid", Guid.NewGuid().ToString());
        }

        protected virtual String GetTitle(XElement item)
        {
            return item.TryGetValue("title", "Unknown");
        }

        protected virtual DateTime GetPublishDate(XElement item)
        {
            var dateString = item.TryGetValue("pubDate");

            return XElementExtensions.ParseDate(dateString);
        }

        protected virtual string GetDownloadUrl(XElement item)
        {
            if (UseEnclosureUrl)
            {
                return item.Element("enclosure").Attribute("url").Value;
            }
            else
            {
                return item.Element("link").Value;
            }
        }

        protected virtual string GetInfoUrl(XElement item)
        {
            if (UseGuidInfoUrl)
            {
                return (String)item.Element("guid");
            }

            return String.Empty;
        }

        protected virtual string GetCommentUrl(XElement item)
        {
            return (String)item.Element("comments");
        }

        protected virtual long GetSize(XElement item)
        {
            if (UseEnclosureLength)
            {
                return GetEnclosureLength(item);
            }
            else if (ParseSizeInDescription)
            {
                return ParseSize(item.Element("description").Value, true);
            }

            return 0;
        }

        protected virtual long GetEnclosureLength(XElement item)
        {
            var enclosure = item.Element("enclosure");

            if (enclosure != null)
            {
                return (long)enclosure.Attribute("length");
            }

            return 0;
        }

        private static readonly Regex ParseSizeRegex = new Regex(@"(?<value>\d+\.\d{1,2}|\d+\,\d+\.\d{1,2}|\d+)\W?(?<unit>[KMG]i?B)",
                                                                 RegexOptions.IgnoreCase | RegexOptions.Compiled);

        public static Int64 ParseSize(String sizeString, Boolean defaultToBinaryPrefix)
        {
            var match = ParseSizeRegex.Matches(sizeString);

            if (match.Count != 0)
            {
                var value = Decimal.Parse(Regex.Replace(match[0].Groups["value"].Value, "\\,", ""), CultureInfo.InvariantCulture);

                var unit = match[0].Groups["unit"].Value.ToLower();

                switch (unit)
                {
                    case "kb":
                        return ConvertToBytes(Convert.ToDouble(value), 1, defaultToBinaryPrefix);
                    case "mb":
                        return ConvertToBytes(Convert.ToDouble(value), 2, defaultToBinaryPrefix);
                    case "gb":
                        return ConvertToBytes(Convert.ToDouble(value), 3, defaultToBinaryPrefix);
                    case "kib":
                        return ConvertToBytes(Convert.ToDouble(value), 1, true);
                    case "mib":
                        return ConvertToBytes(Convert.ToDouble(value), 2, true);
                    case "gib":
                        return ConvertToBytes(Convert.ToDouble(value), 3, true);
                    default:
                        return (Int64)value;
                }
            }
            return 0;
        }

        private static Int64 ConvertToBytes(Double value, Int32 power, Boolean binaryPrefix)
        {
            var prefix = binaryPrefix ? 1024 : 1000;
            var multiplier = Math.Pow(prefix, power);
            var result = value * multiplier;

            return Convert.ToInt64(result);
        }
    }
}
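
Worked examples for ParseSize above (a sketch, not in the commit; the byte counts follow directly from the 1000/1024 multipliers in ConvertToBytes):

// ParseSize extracts the first "<value> <unit>" match from free-form text.
// Plain KB/MB/GB honor defaultToBinaryPrefix; KiB/MiB/GiB are always 1024-based.
var a = RssParser.ParseSize("Size: 1.3 GB", true);   // 1395864371 (1.3 * 1024^3, rounded)
var b = RssParser.ParseSize("1.3 GB", false);        // 1300000000 (1.3 * 1000^3)
var c = RssParser.ParseSize("700 MiB", false);       // 734003200  (700 * 1024^2; the flag is ignored for MiB)
var d = RssParser.ParseSize("1,234.5 MB", true);     // the comma is stripped before Decimal.Parse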

@@ -1,168 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Xml;
using System.Xml.Linq;
using NLog;
using NzbDrone.Common.Instrumentation;
using NzbDrone.Core.Indexers.Newznab;
using NzbDrone.Core.Parser.Model;

namespace NzbDrone.Core.Indexers
{
    public abstract class RssParserBase : IParseFeed
    {
        protected readonly Logger _logger;

        protected virtual ReleaseInfo CreateNewReleaseInfo()
        {
            return new ReleaseInfo();
        }

        protected RssParserBase()
        {
            _logger = NzbDroneLogger.GetLogger(this);
        }

        public virtual IEnumerable<ReleaseInfo> Process(string xml, string url)
        {
            PreProcess(xml, url);

            using (var xmlTextReader = XmlReader.Create(new StringReader(xml), new XmlReaderSettings { DtdProcessing = DtdProcessing.Ignore, IgnoreComments = true }))
            {
                var document = XDocument.Load(xmlTextReader);
                var items = document.Descendants("item");

                var result = new List<ReleaseInfo>();

                foreach (var item in items)
                {
                    try
                    {
                        var reportInfo = ParseFeedItem(item.StripNameSpace(), url);

                        if (reportInfo != null)
                        {
                            result.Add(reportInfo);
                        }
                    }
                    catch (Exception itemEx)
                    {
                        itemEx.Data.Add("Item", item.Title());
                        _logger.ErrorException("An error occurred while processing feed item from " + url, itemEx);
                    }
                }

                return result;
            }
        }

        private ReleaseInfo ParseFeedItem(XElement item, string url)
        {
            var reportInfo = CreateNewReleaseInfo();

            reportInfo.Guid = GetGuid(item);
            reportInfo.Title = GetTitle(item);
            reportInfo.PublishDate = GetPublishDate(item);
            reportInfo.DownloadUrl = GetNzbUrl(item);
            reportInfo.InfoUrl = GetNzbInfoUrl(item);

            try
            {
                reportInfo.Size = GetSize(item);
            }
            catch (Exception)
            {
                throw new SizeParsingException("Unable to parse size from: {0} [{1}]", reportInfo.Title, url);
            }

            _logger.Trace("Parsed: {0}", reportInfo.Title);

            return PostProcessor(item, reportInfo);
        }

        protected virtual String GetGuid(XElement item)
        {
            return item.TryGetValue("guid", Guid.NewGuid().ToString());
        }

        protected virtual string GetTitle(XElement item)
        {
            return item.Title();
        }

        protected virtual DateTime GetPublishDate(XElement item)
        {
            return item.PublishDate();
        }

        protected virtual string GetNzbUrl(XElement item)
        {
            return item.Links().First();
        }

        protected virtual string GetNzbInfoUrl(XElement item)
        {
            return String.Empty;
        }

        protected abstract long GetSize(XElement item);

        protected virtual void PreProcess(string source, string url)
        {
        }

        protected virtual ReleaseInfo PostProcessor(XElement item, ReleaseInfo currentResult)
        {
            return currentResult;
        }

        private static readonly Regex ReportSizeRegex = new Regex(@"(?<value>\d+\.\d{1,2}|\d+\,\d+\.\d{1,2}|\d+)\W?(?<unit>GB|MB|GiB|MiB)",
                                                                  RegexOptions.IgnoreCase | RegexOptions.Compiled);

        public static Int64 ParseSize(String sizeString, Boolean defaultToBinaryPrefix)
        {
            var match = ReportSizeRegex.Matches(sizeString);

            if (match.Count != 0)
            {
                var cultureInfo = new CultureInfo("en-US");
                var value = Decimal.Parse(Regex.Replace(match[0].Groups["value"].Value, "\\,", ""), cultureInfo);

                var unit = match[0].Groups["unit"].Value.ToLower();

                switch (unit)
                {
                    case "kb":
                        return ConvertToBytes(Convert.ToDouble(value), 1, defaultToBinaryPrefix);
                    case "mb":
                        return ConvertToBytes(Convert.ToDouble(value), 2, defaultToBinaryPrefix);
                    case "gb":
                        return ConvertToBytes(Convert.ToDouble(value), 3, defaultToBinaryPrefix);
                    case "kib":
                        return ConvertToBytes(Convert.ToDouble(value), 1, true);
                    case "mib":
                        return ConvertToBytes(Convert.ToDouble(value), 2, true);
                    case "gib":
                        return ConvertToBytes(Convert.ToDouble(value), 3, true);
                    default:
                        return (Int64)value;
                }
            }
            return 0;
        }

        private static Int64 ConvertToBytes(Double value, Int32 power, Boolean binaryPrefix)
        {
            var prefix = binaryPrefix ? 1024 : 1000;
            var multiplier = Math.Pow(prefix, power);
            var result = value * multiplier;

            return Convert.ToInt64(result);
        }
    }
}

@@ -1,56 +1,34 @@
 using System;
-using System.Collections.Generic;
-using System.Linq;
-using FluentValidation.Results;
 using NLog;
+using NzbDrone.Common.Http;
 using NzbDrone.Core.Configuration;
+using NzbDrone.Core.Parser;
 using NzbDrone.Core.ThingiProvider;

 namespace NzbDrone.Core.Indexers.Wombles
 {
-    public class Wombles : IndexerBase<NullConfig>
+    public class Wombles : HttpIndexerBase<NullConfig>
     {
         public override DownloadProtocol Protocol { get { return DownloadProtocol.Usenet; } }
         public override bool SupportsSearch { get { return false; } }

-        public override IParseFeed Parser
+        public override IParseIndexerResponse GetParser()
         {
-            get
-            {
-                return new WomblesParser();
-            }
+            return new WomblesRssParser();
         }

-        public override IEnumerable<string> RecentFeed
+        public override IIndexerRequestGenerator GetRequestGenerator()
         {
-            get { yield return "http://newshost.co.za/rss/?sec=TV&fr=false"; }
+            return new RssIndexerRequestGenerator("http://newshost.co.za/rss/?sec=TV&fr=false");
         }

-        public override IEnumerable<string> GetEpisodeSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int episodeNumber)
+        public Wombles(IHttpClient httpClient, IConfigService configService, IParsingService parsingService, Logger logger)
+            : base(httpClient, configService, parsingService, logger)
         {
-            return new List<string>();
         }
-
-        public override IEnumerable<string> GetSeasonSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int offset)
-        {
-            return new List<string>();
-        }
-
-        public override IEnumerable<string> GetDailyEpisodeSearchUrls(List<String> titles, int tvRageId, DateTime date)
-        {
-            return new List<string>();
-        }
-
-        public override IEnumerable<string> GetAnimeEpisodeSearchUrls(List<String> titles, int tvRageId, int absoluteEpisodeNumber)
-        {
-            return new string[0];
-        }
-
-        public override IEnumerable<string> GetSearchUrls(string query, int offset)
-        {
-            return new List<string>();
-        }
-
-        public override ValidationResult Test()
-        {
-            return new ValidationResult();
-        }
     }
 }
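
With the refactor, Wombles only has to supply a parser and a request generator; fetching and error handling live in HttpIndexerBase. A non-RSS indexer plugs into the same seam by providing its own IParseIndexerResponse. A minimal hypothetical sketch (not part of this commit; the JSON mapping is deliberately left abstract because the wire format is indexer-specific):

using System.Collections.Generic;
using NzbDrone.Core.Parser.Model;

namespace NzbDrone.Core.Indexers.Example
{
    // Hypothetical non-RSS parser: anything that can turn an IndexerResponse
    // into ReleaseInfo objects satisfies the new contract.
    public class ExampleJsonParser : IParseIndexerResponse
    {
        public IList<ReleaseInfo> ParseResponse(IndexerResponse indexerResponse)
        {
            var releases = new List<ReleaseInfo>();

            // A JSON indexer would deserialize indexerResponse.Content here
            // (with any JSON library) and map each entry to a ReleaseInfo.

            return releases;
        }
    }
}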

@@ -3,15 +3,11 @@ using System.Xml.Linq;

 namespace NzbDrone.Core.Indexers.Wombles
 {
-    public class WomblesParser : RssParserBase
+    public class WomblesRssParser : RssParser
     {
-        protected override string GetNzbInfoUrl(XElement item)
-        {
-            return null;
-        }
-
         protected override long GetSize(XElement item)
         {
             // TODO: this can be found in the description element.
             return 0;
         }

@@ -384,42 +384,43 @@
    <Compile Include="Housekeeping\HousekeepingService.cs" />
    <Compile Include="Housekeeping\IHousekeepingTask.cs" />
    <Compile Include="Indexers\Animezb\Animezb.cs" />
    <Compile Include="Indexers\Animezb\AnimezbParser.cs" />
    <Compile Include="Indexers\BasicTorrentRssParser.cs" />
    <Compile Include="Indexers\Animezb\AnimezbRequestGenerator.cs" />
    <Compile Include="Indexers\DownloadProtocol.cs" />
    <Compile Include="Indexers\Exceptions\ApiKeyException.cs" />
    <Compile Include="Indexers\Exceptions\IndexerException.cs" />
    <Compile Include="Indexers\Exceptions\RequestLimitReachedException.cs" />
    <Compile Include="Indexers\Fanzub\Fanzub.cs" />
    <Compile Include="Indexers\Fanzub\FanzubParser.cs" />
    <Compile Include="Indexers\Fanzub\FanzubRequestGenerator.cs" />
    <Compile Include="Indexers\FetchAndParseRssService.cs" />
    <Compile Include="Indexers\IIndexer.cs" />
    <Compile Include="Indexers\IIndexerRequestGenerator.cs" />
    <Compile Include="Indexers\IndexerBase.cs" />
    <Compile Include="Indexers\IndexerDefinition.cs" />
    <Compile Include="Indexers\IndexerFactory.cs">
      <SubType>Code</SubType>
    </Compile>
    <Compile Include="Indexers\IndexerFetchService.cs">
      <SubType>Code</SubType>
    </Compile>
    <Compile Include="Indexers\IndexerRepository.cs" />
    <Compile Include="Indexers\IndexerRequest.cs" />
    <Compile Include="Indexers\IndexerSettingUpdatedEvent.cs" />
    <Compile Include="Indexers\IParseFeed.cs" />
    <Compile Include="Indexers\IProcessIndexerResponse.cs" />
    <Compile Include="Indexers\Newznab\Newznab.cs" />
    <Compile Include="Indexers\Newznab\NewznabException.cs" />
    <Compile Include="Indexers\Newznab\NewznabParser.cs" />
    <Compile Include="Indexers\Newznab\NewznabPreProcessor.cs" />
    <Compile Include="Indexers\Newznab\NewznabRequestGenerator.cs" />
    <Compile Include="Indexers\Newznab\NewznabRssParser.cs" />
    <Compile Include="Indexers\Newznab\NewznabSettings.cs" />
    <Compile Include="Indexers\Newznab\SizeParsingException.cs" />
    <Compile Include="Indexers\NewznabTestService.cs" />
    <Compile Include="Indexers\Exceptions\SizeParsingException.cs" />
    <Compile Include="Indexers\Omgwtfnzbs\OmgwtfnzbsRequestGenerator.cs" />
    <Compile Include="Indexers\Omgwtfnzbs\Omgwtfnzbs.cs" />
    <Compile Include="Indexers\Omgwtfnzbs\OmgwtfnzbsParser.cs" />
    <Compile Include="Indexers\Omgwtfnzbs\OmgwtfnzbsRssParser.cs" />
    <Compile Include="Indexers\Omgwtfnzbs\OmgwtfnzbsSettings.cs" />
    <Compile Include="Indexers\RssParserBase.cs" />
    <Compile Include="Indexers\HttpIndexerBase.cs" />
    <Compile Include="Indexers\RssIndexerRequestGenerator.cs" />
    <Compile Include="Indexers\RssParser.cs" />
    <Compile Include="Indexers\RssSyncCommand.cs" />
    <Compile Include="Indexers\RssSyncCompleteEvent.cs" />
    <Compile Include="Indexers\RssSyncService.cs" />
    <Compile Include="Indexers\Wombles\Wombles.cs" />
    <Compile Include="Indexers\Wombles\WomblesParser.cs" />
    <Compile Include="Indexers\Wombles\WomblesRssParser.cs" />
    <Compile Include="Indexers\XElementExtensions.cs" />
    <Compile Include="IndexerSearch\Definitions\AnimeEpisodeSearchCriteria.cs" />
    <Compile Include="IndexerSearch\Definitions\DailyEpisodeSearchCriteria.cs" />