Compare commits
3 commits: dd34daae6b ... 497364c126

| Author | SHA1 | Date |
|---|---|---|
|  | 497364c126 |  |
|  | 1b0671b34c |  |
|  | 1a54fdbcd1 |  |

55 changed files with 255 additions and 13197 deletions
@@ -1,23 +1,21 @@
 module.exports = api => {
     const isTest = api.env('test');
 
     const preset = [
-        "@babel/preset-env" , { targets: 'defaults' }
+        "@babel/preset-env", { targets: 'defaults' }
+    ];
+    const testPreset = [
+        "@babel/preset-env", { targets: { node: process.versions.node } }
     ];
-    const testPreset = [
-        "@babel/preset-env", { targets: { node: process.versions.node } }
-    ];
 
     const plugins = [
         "@babel/plugin-syntax-dynamic-import",
         "@babel/plugin-transform-react-jsx",
-        "@babel/plugin-syntax-function-bind",
-        "@babel/plugin-proposal-function-bind",
-        "@babel/plugin-proposal-class-properties"
-    ]
+        "@babel/plugin-proposal-class-properties"
+    ]
 
     return {
-        "presets": [isTest ? testPreset: preset],
+        "presets": [isTest ? testPreset : preset],
         "plugins": plugins
     }
 }
@@ -29,11 +29,6 @@ x-django-env: &django-env
   EMAIL_USE_SSL:
   EMAIL_DEFAULT_FROM:
 
-  # Reddit
-  REDDIT_CLIENT_ID:
-  REDDIT_CLIENT_SECRET:
-  REDDIT_CALLBACK_URL:
-
   # Sentry
   SENTRY_DSN:
 
@@ -11,12 +11,6 @@ class UserAdminForm(UserChangeForm):
     class Meta:
         widgets = {
             "email": forms.EmailInput(attrs={"size": "50"}),
-            "reddit_access_token": forms.PasswordInput(
-                attrs={"size": "90"}, render_value=True
-            ),
-            "reddit_refresh_token": forms.PasswordInput(
-                attrs={"size": "90"}, render_value=True
-            ),
         }
 
@@ -34,10 +28,6 @@ class UserAdmin(DjangoUserAdmin):
             _("User settings"),
             {"fields": ("email", "password", "first_name", "last_name", "is_active")},
         ),
-        (
-            _("Reddit settings"),
-            {"fields": ("reddit_access_token", "reddit_refresh_token")},
-        ),
         (
             _("Permission settings"),
             {"classes": ("collapse",), "fields": ("is_staff", "is_superuser")},
@@ -39,10 +39,6 @@ class UserManager(DjangoUserManager):
 class User(AbstractUser):
     email = models.EmailField(_("email address"), unique=True)
 
-    # reddit settings
-    reddit_refresh_token = models.CharField(max_length=255, blank=True, null=True)
-    reddit_access_token = models.CharField(max_length=255, blank=True, null=True)
-
     # settings
     auto_mark_read = models.BooleanField(
         _("Auto read marking"),
@@ -1,47 +0,0 @@
-{% extends "sidebar.html" %}
-{% load i18n %}
-
-{% block content %}
-  <main id="integrations--page" class="main" data-render-sidebar=true>
-    <div class="main__container">
-      <section class="section">
-        {% include "components/header/header.html" with title="Integrations" only %}
-
-        <div class="integrations">
-          <h3 class="integrations__title">Reddit</h3>
-          <div class="integrations__controls">
-            {% if reddit_authorization_url %}
-              <a class="link button button--reddit" href="{{ reddit_authorization_url }}">
-                {% trans "Authorize account" %}
-              </a>
-            {% else %}
-              <button class="button button--reddit button--disabled" disabled>
-                {% trans "Authorize account" %}
-              </button>
-            {% endif %}
-
-            {% if reddit_refresh_url %}
-              <a class="link button button--reddit" href="{{ reddit_refresh_url }}">
-                {% trans "Refresh token" %}
-              </a>
-            {% else %}
-              <button class="button button--reddit button--disabled" disabled>
-                {% trans "Refresh token" %}
-              </button>
-            {% endif %}
-
-            {% if reddit_revoke_url %}
-              <a class="link button button--reddit" href="{{ reddit_revoke_url }}">
-                {% trans "Deauthorize account" %}
-              </a>
-            {% else %}
-              <button class="button button--reddit button--disabled" disabled>
-                {% trans "Deauthorize account" %}
-              </button>
-            {% endif %}
-          </div>
-        </div>
-      </section>
-    </div>
-  </main>
-{% endblock %}
@@ -1,22 +0,0 @@
-{% extends "sidebar.html" %}
-{% load i18n %}
-
-{% block content %}
-  <main id="reddit--page" class="main" data-render-sidebar=true>
-    <div class="main__container">
-      <section class="section text-section">
-        {% if error %}
-          <h1 class="h1">{% trans "Reddit authorization failed" %}</h1>
-          <p>{{ error }}</p>
-        {% elif access_token and refresh_token %}
-          <h1 class="h1">{% trans "Reddit account is linked" %}</h1>
-          <p>{% trans "Your reddit account was successfully linked." %}</p>
-        {% endif %}
-
-        <p>
-          <a class="link" href="{% url 'accounts:settings:integrations' %}">{% trans "Return to integrations page" %}</a>
-        </p>
-      </section>
-    </div>
-  </main>
-{% endblock %}
@@ -1,275 +0,0 @@
-from unittest.mock import patch
-from urllib.parse import urlencode
-from uuid import uuid4
-
-from django.core.cache import cache
-from django.test import TestCase
-from django.urls import reverse
-
-from bs4 import BeautifulSoup
-
-from newsreader.accounts.tests.factories import UserFactory
-from newsreader.news.collection.exceptions import (
-    StreamException,
-    StreamTooManyException,
-)
-
-
-class IntegrationsViewTestCase(TestCase):
-    def setUp(self):
-        self.user = UserFactory(email="test@test.nl", password="test")
-        self.client.force_login(self.user)
-
-        self.url = reverse("accounts:settings:integrations")
-
-
-class RedditIntegrationsTestCase(IntegrationsViewTestCase):
-    def test_reddit_authorization(self):
-        self.user.reddit_refresh_token = None
-        self.user.save()
-
-        response = self.client.get(self.url)
-
-        soup = BeautifulSoup(response.content, features="lxml")
-        button = soup.find("a", class_="link button button--reddit")
-
-        self.assertEquals(button.text.strip(), "Authorize account")
-
-    def test_reddit_refresh_token(self):
-        self.user.reddit_refresh_token = "jadajadajada"
-        self.user.reddit_access_token = None
-        self.user.save()
-
-        response = self.client.get(self.url)
-
-        soup = BeautifulSoup(response.content, features="lxml")
-        button = soup.find("a", class_="link button button--reddit")
-
-        self.assertEquals(button.text.strip(), "Refresh token")
-
-    def test_reddit_revoke(self):
-        self.user.reddit_refresh_token = "jadajadajada"
-        self.user.reddit_access_token = None
-        self.user.save()
-
-        response = self.client.get(self.url)
-
-        soup = BeautifulSoup(response.content, features="lxml")
-        buttons = soup.find_all("a", class_="link button button--reddit")
-
-        self.assertIn(
-            "Deauthorize account", [button.text.strip() for button in buttons]
-        )
-
-
-class RedditTemplateViewTestCase(TestCase):
-    def setUp(self):
-        self.user = UserFactory(email="test@test.nl", password="test")
-        self.client.force_login(self.user)
-
-        self.base_url = reverse("accounts:settings:reddit-template")
-        self.state = str(uuid4())
-
-        self.patch = patch("newsreader.news.collection.reddit.post")
-        self.mocked_post = self.patch.start()
-
-    def tearDown(self):
-        patch.stopall()
-
-    def test_simple(self):
-        response = self.client.get(self.base_url)
-
-        self.assertEquals(response.status_code, 200)
-        self.assertContains(response, "Return to integrations page")
-
-    def test_successful_authorization(self):
-        self.mocked_post.return_value.json.return_value = {
-            "access_token": "1001010412",
-            "refresh_token": "134510143",
-        }
-
-        cache.set(f"{self.user.email}-reddit-auth", self.state)
-
-        params = {"state": self.state, "code": "Valid code"}
-        url = f"{self.base_url}?{urlencode(params)}"
-
-        response = self.client.get(url)
-
-        self.mocked_post.assert_called_once()
-
-        self.assertEquals(response.status_code, 200)
-        self.assertContains(response, "Your reddit account was successfully linked.")
-
-        self.user.refresh_from_db()
-
-        self.assertEquals(self.user.reddit_access_token, "1001010412")
-        self.assertEquals(self.user.reddit_refresh_token, "134510143")
-
-        self.assertEquals(cache.get(f"{self.user.email}-reddit-auth"), None)
-
-    def test_error(self):
-        params = {"error": "Denied authorization"}
-
-        url = f"{self.base_url}?{urlencode(params)}"
-
-        response = self.client.get(url)
-
-        self.assertEquals(response.status_code, 200)
-        self.assertContains(response, "Denied authorization")
-
-    def test_invalid_state(self):
-        cache.set(f"{self.user.email}-reddit-auth", str(uuid4()))
-
-        params = {"code": "Valid code", "state": "Invalid state"}
-
-        url = f"{self.base_url}?{urlencode(params)}"
-
-        response = self.client.get(url)
-
-        self.assertEquals(response.status_code, 200)
-        self.assertContains(
-            response, "The saved state for Reddit authorization did not match"
-        )
-
-    def test_stream_error(self):
-        self.mocked_post.side_effect = StreamTooManyException
-
-        cache.set(f"{self.user.email}-reddit-auth", self.state)
-
-        params = {"state": self.state, "code": "Valid code"}
-        url = f"{self.base_url}?{urlencode(params)}"
-
-        response = self.client.get(url)
-
-        self.mocked_post.assert_called_once()
-
-        self.assertEquals(response.status_code, 200)
-        self.assertContains(response, "Too many requests")
-
-        self.user.refresh_from_db()
-
-        self.assertEquals(self.user.reddit_access_token, None)
-        self.assertEquals(self.user.reddit_refresh_token, None)
-
-        self.assertEquals(cache.get(f"{self.user.email}-reddit-auth"), self.state)
-
-    def test_unexpected_json(self):
-        self.mocked_post.return_value.json.return_value = {"message": "Happy eastern"}
-
-        cache.set(f"{self.user.email}-reddit-auth", self.state)
-
-        params = {"state": self.state, "code": "Valid code"}
-        url = f"{self.base_url}?{urlencode(params)}"
-
-        response = self.client.get(url)
-
-        self.mocked_post.assert_called_once()
-
-        self.assertEquals(response.status_code, 200)
-        self.assertContains(response, "Access and refresh token not found in response")
-
-        self.user.refresh_from_db()
-
-        self.assertEquals(self.user.reddit_access_token, None)
-        self.assertEquals(self.user.reddit_refresh_token, None)
-
-        self.assertEquals(cache.get(f"{self.user.email}-reddit-auth"), self.state)
-
-
-class RedditTokenRedirectViewTestCase(TestCase):
-    def setUp(self):
-        self.user = UserFactory(email="test@test.nl", password="test")
-        self.client.force_login(self.user)
-
-        self.patch = patch("newsreader.accounts.views.integrations.RedditTokenTask")
-        self.mocked_task = self.patch.start()
-
-    def tearDown(self):
-        cache.clear()
-
-    def test_simple(self):
-        response = self.client.get(reverse("accounts:settings:reddit-refresh"))
-
-        self.assertRedirects(response, reverse("accounts:settings:integrations"))
-
-        self.mocked_task.delay.assert_called_once_with(self.user.pk)
-
-        self.assertEquals(1, cache.get(f"{self.user.email}-reddit-refresh"))
-
-    def test_not_active(self):
-        cache.set(f"{self.user.email}-reddit-refresh", 1)
-
-        response = self.client.get(reverse("accounts:settings:reddit-refresh"))
-
-        self.assertRedirects(response, reverse("accounts:settings:integrations"))
-
-        self.mocked_task.delay.assert_not_called()
-
-
-class RedditRevokeRedirectViewTestCase(TestCase):
-    def setUp(self):
-        self.user = UserFactory(email="test@test.nl", password="test")
-        self.client.force_login(self.user)
-
-        self.patch = patch("newsreader.accounts.views.integrations.revoke_reddit_token")
-        self.mocked_revoke = self.patch.start()
-
-    def test_simple(self):
-        self.user.reddit_access_token = "jadajadajada"
-        self.user.reddit_refresh_token = "jadajadajada"
-        self.user.save()
-
-        self.mocked_revoke.return_value = True
-
-        response = self.client.get(reverse("accounts:settings:reddit-revoke"))
-
-        self.assertRedirects(response, reverse("accounts:settings:integrations"))
-
-        self.mocked_revoke.assert_called_once_with(self.user)
-
-        self.user.refresh_from_db()
-
-        self.assertEquals(self.user.reddit_access_token, None)
-        self.assertEquals(self.user.reddit_refresh_token, None)
-
-    def test_no_refresh_token(self):
-        self.user.reddit_refresh_token = None
-        self.user.save()
-
-        response = self.client.get(reverse("accounts:settings:reddit-revoke"))
-
-        self.assertRedirects(response, reverse("accounts:settings:integrations"))
-
-        self.mocked_revoke.assert_not_called()
-
-    def test_unsuccessful_response(self):
-        self.user.reddit_access_token = "jadajadajada"
-        self.user.reddit_refresh_token = "jadajadajada"
-        self.user.save()
-
-        self.mocked_revoke.return_value = False
-
-        response = self.client.get(reverse("accounts:settings:reddit-revoke"))
-
-        self.assertRedirects(response, reverse("accounts:settings:integrations"))
-
-        self.user.refresh_from_db()
-
-        self.assertEquals(self.user.reddit_access_token, "jadajadajada")
-        self.assertEquals(self.user.reddit_refresh_token, "jadajadajada")
-
-    def test_stream_exception(self):
-        self.user.reddit_access_token = "jadajadajada"
-        self.user.reddit_refresh_token = "jadajadajada"
-        self.user.save()
-
-        self.mocked_revoke.side_effect = StreamException
-
-        response = self.client.get(reverse("accounts:settings:reddit-revoke"))
-
-        self.assertRedirects(response, reverse("accounts:settings:integrations"))
-
-        self.user.refresh_from_db()
-
-        self.assertEquals(self.user.reddit_access_token, "jadajadajada")
-        self.assertEquals(self.user.reddit_refresh_token, "jadajadajada")
@@ -3,7 +3,6 @@ from django.urls import include, path
 from newsreader.accounts.views import (
     FaviconRedirectView,
-    IntegrationsView,
     LoginView,
     LogoutView,
     PasswordChangeView,
@@ -11,33 +10,11 @@ from newsreader.accounts.views import (
     PasswordResetConfirmView,
     PasswordResetDoneView,
     PasswordResetView,
-    RedditRevokeRedirectView,
-    RedditTemplateView,
-    RedditTokenRedirectView,
     SettingsView,
 )
 
 
 settings_patterns = [
-    # Integrations
-    path(
-        "integrations/reddit/callback/",
-        login_required(RedditTemplateView.as_view()),
-        name="reddit-template",
-    ),
-    path(
-        "integrations/reddit/refresh/",
-        login_required(RedditTokenRedirectView.as_view()),
-        name="reddit-refresh",
-    ),
-    path(
-        "integrations/reddit/revoke/",
-        login_required(RedditRevokeRedirectView.as_view()),
-        name="reddit-revoke",
-    ),
-    path(
-        "integrations/", login_required(IntegrationsView.as_view()), name="integrations"
-    ),
     # Misc
     path("favicon/", login_required(FaviconRedirectView.as_view()), name="favicon"),
     path("", login_required(SettingsView.as_view()), name="home"),
@@ -1,11 +1,5 @@
 from newsreader.accounts.views.auth import LoginView, LogoutView
 from newsreader.accounts.views.favicon import FaviconRedirectView
-from newsreader.accounts.views.integrations import (
-    IntegrationsView,
-    RedditRevokeRedirectView,
-    RedditTemplateView,
-    RedditTokenRedirectView,
-)
 from newsreader.accounts.views.password import (
     PasswordChangeView,
     PasswordResetCompleteView,
@@ -20,10 +14,6 @@ __all__ = [
     "LoginView",
     "LogoutView",
     "FaviconRedirectView",
-    "IntegrationsView",
-    "RedditRevokeRedirectView",
-    "RedditTemplateView",
-    "RedditTokenRedirectView",
     "PasswordChangeView",
     "PasswordResetCompleteView",
     "PasswordResetConfirmView",
@@ -1,156 +0,0 @@
-import logging
-
-from django.contrib import messages
-from django.core.cache import cache
-from django.urls import reverse_lazy
-from django.utils.translation import gettext as _
-from django.views.generic import RedirectView, TemplateView
-
-from newsreader.news.collection.exceptions import StreamException
-from newsreader.news.collection.reddit import (
-    get_reddit_access_token,
-    get_reddit_authorization_url,
-    revoke_reddit_token,
-)
-from newsreader.news.collection.tasks import RedditTokenTask
-from newsreader.utils.views import NavListMixin
-
-
-logger = logging.getLogger(__name__)
-
-
-class IntegrationsView(NavListMixin, TemplateView):
-    template_name = "accounts/views/integrations.html"
-
-    def get_context_data(self, **kwargs):
-        return {
-            **super().get_context_data(**kwargs),
-            **self.get_reddit_context(**kwargs),
-        }
-
-    def get_reddit_context(self, **kwargs):
-        user = self.request.user
-        reddit_authorization_url = None
-        reddit_refresh_url = None
-
-        reddit_task_active = cache.get(f"{user.email}-reddit-refresh")
-
-        if (
-            user.reddit_refresh_token
-            and not user.reddit_access_token
-            and not reddit_task_active
-        ):
-            reddit_refresh_url = reverse_lazy("accounts:settings:reddit-refresh")
-
-        if not user.reddit_refresh_token:
-            reddit_authorization_url = get_reddit_authorization_url(user)
-
-        return {
-            "reddit_authorization_url": reddit_authorization_url,
-            "reddit_refresh_url": reddit_refresh_url,
-            "reddit_revoke_url": (
-                reverse_lazy("accounts:settings:reddit-revoke")
-                if not reddit_authorization_url
-                else None
-            ),
-        }
-
-
-class RedditTemplateView(NavListMixin, TemplateView):
-    template_name = "accounts/views/reddit.html"
-
-    def get(self, request, *args, **kwargs):
-        context = self.get_context_data(**kwargs)
-
-        error = request.GET.get("error", None)
-        state = request.GET.get("state", None)
-        code = request.GET.get("code", None)
-
-        if error:
-            return self.render_to_response({**context, "error": error})
-
-        if not code or not state:
-            return self.render_to_response(context)
-
-        cached_state = cache.get(f"{request.user.email}-reddit-auth")
-
-        if state != cached_state:
-            return self.render_to_response(
-                {
-                    **context,
-                    "error": _(
-                        "The saved state for Reddit authorization did not match"
-                    ),
-                }
-            )
-
-        try:
-            access_token, refresh_token = get_reddit_access_token(code, request.user)
-
-            return self.render_to_response(
-                {
-                    **context,
-                    "access_token": access_token,
-                    "refresh_token": refresh_token,
-                }
-            )
-        except StreamException as e:
-            return self.render_to_response({**context, "error": str(e)})
-        except KeyError:
-            return self.render_to_response(
-                {
-                    **context,
-                    "error": _("Access and refresh token not found in response"),
-                }
-            )
-
-
-class RedditTokenRedirectView(RedirectView):
-    url = reverse_lazy("accounts:settings:integrations")
-
-    def get(self, request, *args, **kwargs):
-        response = super().get(request, *args, **kwargs)
-
-        user = request.user
-        task_active = cache.get(f"{user.email}-reddit-refresh")
-
-        if not task_active:
-            RedditTokenTask.delay(user.pk)
-            messages.success(request, _("Access token is being retrieved"))
-            cache.set(f"{user.email}-reddit-refresh", 1, 300)
-            return response
-
-        messages.error(request, _("Unable to retrieve token"))
-        return response
-
-
-class RedditRevokeRedirectView(RedirectView):
-    url = reverse_lazy("accounts:settings:integrations")
-
-    def get(self, request, *args, **kwargs):
-        response = super().get(request, *args, **kwargs)
-
-        user = request.user
-
-        if not user.reddit_refresh_token:
-            messages.error(request, _("No reddit account is linked to this account"))
-            return response
-
-        try:
-            is_revoked = revoke_reddit_token(user)
-        except StreamException:
-            logger.exception(f"Unable to revoke reddit token for {user.pk}")
-
-            messages.error(request, _("Unable to revoke reddit token"))
-            return response
-
-        if not is_revoked:
-            messages.error(request, _("Unable to revoke reddit token"))
-            return response
-
-        user.reddit_access_token = None
-        user.reddit_refresh_token = None
-        user.save()
-
-        messages.success(request, _("Reddit account deathorized"))
-        return response
@@ -209,16 +209,6 @@ STATICFILES_FINDERS = [
 # Email
 EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
 
-# Reddit integration
-REDDIT_CLIENT_ID = "CLIENT_ID"
-REDDIT_CLIENT_SECRET = "CLIENT_SECRET"
-REDDIT_REDIRECT_URL = (
-    "http://127.0.0.1:8000/accounts/settings/integrations/reddit/callback/"
-)
-
-# Twitter integration
-TWITTER_URL = "https://twitter.com"
-
 # Third party settings
 AXES_HANDLER = "axes.handlers.cache.AxesCacheHandler"
 AXES_CACHE = "axes"
@@ -48,11 +48,6 @@ EMAIL_USE_SSL = bool(os.environ.get("EMAIL_USE_SSL"))
 VERSION = get_current_version(debug=False)
 ENVIRONMENT = "production"
 
-# Reddit integration
-REDDIT_CLIENT_ID = os.environ.get("REDDIT_CLIENT_ID", "")
-REDDIT_CLIENT_SECRET = os.environ.get("REDDIT_CLIENT_SECRET", "")
-REDDIT_REDIRECT_URL = os.environ.get("REDDIT_CALLBACK_URL", "")
-
 # Third party settings
 AXES_HANDLER = "axes.handlers.database.AxesDatabaseHandler"
@@ -1,6 +1,4 @@
 class Selector {
-    onClick = ::this.onClick;
-
     inputs = [];
 
     constructor() {
@@ -11,7 +9,7 @@ class Selector {
         selectAllInput.onchange = this.onClick;
     }
 
-    onClick(e) {
+    onClick = (e) => {
         const targetValue = e.target.checked;
 
         this.inputs.forEach(input => {
@@ -9,10 +9,6 @@ import Messages from '../../components/Messages.js';
 import Sidebar from '../../components/Sidebar.js';
 
 class App extends React.Component {
-    selectCategory = ::this.selectCategory;
-    deselectCategory = ::this.deselectCategory;
-    deleteCategory = ::this.deleteCategory;
-
     constructor(props) {
         super(props);
 
@@ -24,15 +20,15 @@ class App extends React.Component {
         };
     }
 
-    selectCategory(categoryId) {
+    selectCategory = (categoryId) => {
         this.setState({ selectedCategoryId: categoryId });
     }
 
-    deselectCategory() {
+    deselectCategory = () => {
         this.setState({ selectedCategoryId: null });
     }
 
-    deleteCategory(categoryId) {
+    deleteCategory = (categoryId) => {
         const url = `/api/categories/${categoryId}/`;
         const options = {
             method: 'DELETE',
|
@ -33,7 +33,6 @@ class App extends React.Component {
|
||||||
<HomepageSidebar navLinks={this.props.navLinks} />
|
<HomepageSidebar navLinks={this.props.navLinks} />
|
||||||
<PostList
|
<PostList
|
||||||
feedUrl={this.props.feedUrl}
|
feedUrl={this.props.feedUrl}
|
||||||
subredditUrl={this.props.subredditUrl}
|
|
||||||
timezone={this.props.timezone}
|
timezone={this.props.timezone}
|
||||||
forwardedRef={this.postListRef}
|
forwardedRef={this.postListRef}
|
||||||
postsByType={this.props.postsByType}
|
postsByType={this.props.postsByType}
|
||||||
|
|
@@ -46,7 +45,6 @@ class App extends React.Component {
                     category={this.props.category}
                     selectedType={this.props.selectedType}
                     feedUrl={this.props.feedUrl}
-                    subredditUrl={this.props.subredditUrl}
                     categoriesUrl={this.props.categoriesUrl}
                     timezone={this.props.timezone}
                     autoMarking={this.props.autoMarking}
@@ -3,139 +3,136 @@ import { connect } from 'react-redux';
 import Cookies from 'js-cookie';
 
 import { unSelectPost, markPostRead, toggleSaved } from '../actions/posts.js';
-import { SAVED_TYPE, SUBREDDIT } from '../constants.js';
+import { SAVED_TYPE } from '../constants.js';
 import { formatDatetime } from '../../../utils.js';
 
 class PostModal extends React.Component {
     modalListener = ::this.modalListener;
     readTimer = null;
 
     componentDidMount() {
         const post = { ...this.props.post };
         const markPostRead = this.props.markPostRead;
         const token = Cookies.get('csrftoken');
 
         if (this.props.autoMarking && this.props.selectedType != SAVED_TYPE && !post.read) {
             this.readTimer = setTimeout(markPostRead, 3000, post, token);
         }
 
         window.addEventListener('click', this.modalListener);
     }
 
     componentWillUnmount() {
         if (this.readTimer) {
             clearTimeout(this.readTimer);
         }
 
         this.readTimer = null;
 
         window.removeEventListener('click', this.modalListener);
     }
 
     modalListener(e) {
         const targetClassName = e.target.className;
 
         if (this.props.post && targetClassName == 'modal post-modal') {
             this.props.unSelectPost();
         }
     }
 
     render() {
         const post = this.props.post;
         const token = Cookies.get('csrftoken');
         const publicationDate = formatDatetime(post.publicationDate);
         const titleClassName = post.read ? 'post__title post__title--read' : 'post__title';
         const readButtonDisabled =
             post.read || this.props.isUpdating || this.props.selectedType === SAVED_TYPE;
         const savedIconClass = post.saved
             ? 'post__save post__save--saved saved-icon saved-icon--saved'
             : 'post__save saved-icon';
 
         let ruleUrl = '';
 
         switch (this.props.rule.type) {
-            case SUBREDDIT:
-                ruleUrl = `${this.props.subredditUrl}/${this.props.rule.id}/`;
-                break;
             default:
                 ruleUrl = `${this.props.feedUrl}/${this.props.rule.id}/`;
                 break;
         }
 
         return (
             <div className="modal post-modal">
                 <div className="post">
                     <div className="post__container">
                         <div className="post__header">
                             <div className="post__actions">
                                 <button
                                     className={`button read-button ${readButtonDisabled &&
                                         'button--disabled'}`}
                                     onClick={() =>
                                         !readButtonDisabled && this.props.markPostRead(post, token)
                                     }
                                 >
                                     <i className="fas fa-check" /> Mark as read
                                 </button>
                                 <button
                                     className="button post__close-button"
                                     onClick={() => this.props.unSelectPost()}
                                 >
                                     <i className="fas fa-times"></i> Close
                                 </button>
                             </div>
                             <div className="post__heading">
                                 <h2 className={titleClassName}>{`${post.title} `}</h2>
                                 <div className="post__meta">
                                     <div className="post__text">
                                         <span className="post__date">{publicationDate}</span>
                                         {post.author && <span className="post__author">{post.author}</span>}
                                     </div>
 
                                     <div className="post__buttons">
                                         {this.props.category && (
                                             <span
                                                 className="badge post__category"
                                                 title={this.props.category.name}
                                             >
                                                 <a
                                                     href={`${this.props.categoriesUrl}/${this.props.category.id}/`}
                                                     target="_blank"
                                                     rel="noopener noreferrer"
                                                 >
                                                     {this.props.category.name}
                                                 </a>
                                             </span>
                                         )}
                                         <span className="badge post__rule" title={this.props.rule.name}>
                                             <a href={ruleUrl} target="_blank" rel="noopener noreferrer">
                                                 {this.props.rule.name}
                                             </a>
                                         </span>
                                         <a
                                             className="post__link"
                                             href={post.url}
                                             target="_blank"
                                             rel="noopener noreferrer"
                                         >
                                             <i className="fas fa-external-link-alt" />
                                         </a>
                                         <span
                                             className={savedIconClass}
                                             onClick={() => this.props.toggleSaved(post, token)}
                                         />
                                     </div>
                                 </div>
                             </div>
                         </div>
 
                         {/* HTML is sanitized by the collectors */}
                         <div className="post__body" dangerouslySetInnerHTML={{ __html: post.body }} />
                     </div>
                 </div>
             </div>
         );
     }
 }
 
 const mapDispatchToProps = dispatch => ({
@@ -1,8 +1,6 @@
 import React from 'react';
 
 export default class ScrollTop extends React.Component {
-    scrollListener = ::this.scrollListener;
-
     state = {
         listenerAttached: false,
         showTop: false,
@@ -17,7 +15,7 @@ export default class ScrollTop extends React.Component {
         }
     }
 
-    scrollListener() {
+    scrollListener = () => {
         const postList = this.props.postListNode;
         const elementEnd =
             postList.scrollTop + postList.offsetHeight >= postList.scrollHeight;
@@ -2,7 +2,7 @@ import React from 'react';
 import { connect } from 'react-redux';
 import Cookies from 'js-cookie';
 
-import { CATEGORY_TYPE, SAVED_TYPE, SUBREDDIT } from '../../constants.js';
+import { CATEGORY_TYPE, SAVED_TYPE } from '../../constants.js';
 import { selectPost, toggleSaved } from '../../actions/posts.js';
 import { formatDatetime } from '../../../../utils.js';
 
@@ -18,12 +18,7 @@ class PostItem extends React.Component {
             : 'posts__header';
         const savedIconClass = post.saved ? 'saved-icon saved-icon--saved' : 'saved-icon';
 
-        let ruleUrl = '';
-        if (rule.type === SUBREDDIT) {
-            ruleUrl = `${this.props.subredditUrl}/${rule.id}/`;
-        } else {
-            ruleUrl = `${this.props.feedUrl}/${rule.id}/`;
-        }
+        const ruleUrl = `${this.props.feedUrl}/${rule.id}/`;
 
         return (
             <li className="posts__item" ref={this.props.forwardedRef}>
|
|
@ -4,13 +4,11 @@ import { isEqual } from 'lodash';
|
||||||
|
|
||||||
import { fetchPostsBySection, fetchSavedPosts } from '../../actions/posts.js';
|
import { fetchPostsBySection, fetchSavedPosts } from '../../actions/posts.js';
|
||||||
import { SAVED_TYPE } from '../../constants.js';
|
import { SAVED_TYPE } from '../../constants.js';
|
||||||
import { filterPosts } from './filters.js';
|
|
||||||
|
|
||||||
import LoadingIndicator from '../../../../components/LoadingIndicator.js';
|
import LoadingIndicator from '../../../../components/LoadingIndicator.js';
|
||||||
import PostItem from './PostItem.js';
|
import PostItem from './PostItem.js';
|
||||||
|
|
||||||
class PostList extends React.Component {
|
class PostList extends React.Component {
|
||||||
handleIntersect = ::this.handleIntersect;
|
|
||||||
lastPostRef = null;
|
lastPostRef = null;
|
||||||
observer = null;
|
observer = null;
|
||||||
|
|
||||||
|
|
@@ -33,7 +31,7 @@ class PostList extends React.Component {
         this.observer.disconnect();
     }
 
-    handleIntersect(entries) {
+    handleIntersect = (entries) => {
         entries.every(entry => {
             if (entry.isIntersecting) {
                 this.observer.unobserve(entry.target);
|
||||||
post: item,
|
post: item,
|
||||||
selected: this.props.selected,
|
selected: this.props.selected,
|
||||||
feedUrl: this.props.feedUrl,
|
feedUrl: this.props.feedUrl,
|
||||||
subredditUrl: this.props.subredditUrl,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if (isLastItem?.id === item.id) {
|
if (isLastItem?.id === item.id) {
|
||||||
|
|
|
||||||
|
|
@@ -5,9 +5,7 @@ import Cookies from 'js-cookie';
 import { markRead } from '../../actions/selected.js';
 
 class ReadButton extends React.Component {
-    markSelectedRead = ::this.markSelectedRead;
-
-    markSelectedRead() {
+    markSelectedRead = () => {
         const token = Cookies.get('csrftoken');
 
         if (this.props.selected.unread > 0) {
@@ -2,5 +2,4 @@ export const RULE_TYPE = 'RULE';
 export const CATEGORY_TYPE = 'CATEGORY';
 export const SAVED_TYPE = 'SAVED';
 
-export const SUBREDDIT = 'subreddit';
 export const FEED = 'feed';
@@ -12,7 +12,7 @@ if (page) {
     const store = configureStore();
 
     const settings = JSON.parse(document.getElementById('homepageSettings').textContent);
-    const { feedUrl, subredditUrl, categoriesUrl } = settings;
+    const { feedUrl, categoriesUrl } = settings;
 
     const navLinks = JSON.parse(document.getElementById('Links').textContent);
 
@@ -20,7 +20,6 @@ if (page) {
         <Provider store={store}>
             <App
                 feedUrl={feedUrl.substring(1, feedUrl.length - 3)}
-                subredditUrl={subredditUrl.substring(1, subredditUrl.length - 3)}
                 categoriesUrl={categoriesUrl.substring(1, categoriesUrl.length - 3)}
                 timezone={settings.timezone}
                 autoMarking={settings.autoMarking}
@@ -4,11 +4,3 @@ from django.utils.translation import gettext as _
 
 class RuleTypeChoices(TextChoices):
     feed = "feed", _("Feed")
-    subreddit = "subreddit", _("Subreddit")
-    twitter_timeline = "twitter_timeline", _("Twitter timeline")
-
-
-class TwitterPostTypeChoices(TextChoices):
-    photo = "photo", _("Photo")
-    video = "video", _("Video")
-    animated_gif = "animated_gif", _("GIF")
@@ -1,11 +1,9 @@
 from newsreader.news.collection.forms.feed import FeedForm, OPMLImportForm
-from newsreader.news.collection.forms.reddit import SubRedditForm
 from newsreader.news.collection.forms.rules import CollectionRuleBulkForm
 
 
 __all__ = [
     "FeedForm",
     "OPMLImportForm",
-    "SubRedditForm",
     "CollectionRuleBulkForm",
 ]
@@ -1,57 +0,0 @@
-from django import forms
-from django.core.exceptions import ValidationError
-from django.utils.safestring import mark_safe
-from django.utils.translation import gettext_lazy as _
-
-from newsreader.news.collection.choices import RuleTypeChoices
-from newsreader.news.collection.forms.base import CollectionRuleForm
-from newsreader.news.collection.models import CollectionRule
-from newsreader.news.collection.reddit import REDDIT_API_URL
-
-
-def get_reddit_help_text():
-    return mark_safe(
-        "Only subreddits are supported"
-        " see the 'listings' section in <a className='link' target='_blank' rel='noopener noreferrer'"
-        " href='https://www.reddit.com/dev/api#section_listings'>the reddit API docs</a>."
-        " For example: <a className='link' target='_blank' rel='noopener noreferrer'"
-        " href='https://oauth.reddit.com/r/aww'>https://oauth.reddit.com/r/aww</a>"
-    )
-
-
-class SubRedditForm(CollectionRuleForm):
-    url = forms.URLField(max_length=1024, help_text=get_reddit_help_text)
-
-    def clean_url(self):
-        url = self.cleaned_data["url"]
-
-        if not url.startswith(REDDIT_API_URL):
-            raise ValidationError(_("This does not look like an Reddit API URL"))
-
-        return url
-
-    def save(self, commit=True):
-        instance = super().save(commit=False)
-
-        instance.type = RuleTypeChoices.subreddit
-
-        if commit:
-            instance.save()
-            self.save_m2m()
-
-        return instance
-
-    class Meta:
-        model = CollectionRule
-        fields = (
-            "name",
-            "url",
-            "favicon",
-            "category",
-            "reddit_allow_nfsw",
-            "reddit_allow_spoiler",
-            "reddit_allow_viewed",
-            "reddit_upvotes_min",
-            "reddit_downvotes_max",
-            "reddit_comments_min",
-        )
@@ -1,4 +1,3 @@
-from django.conf import settings
 from django.db import models
 from django.urls import reverse
 from django.utils.translation import gettext as _
@@ -49,25 +48,6 @@ class CollectionRule(TimeStampedModel):
         on_delete=models.CASCADE,
     )
 
-    # Reddit
-    reddit_allow_nfsw = models.BooleanField(_("Allow NSFW posts"), default=False)
-    reddit_allow_spoiler = models.BooleanField(_("Allow spoilers"), default=False)
-    reddit_allow_viewed = models.BooleanField(
-        _("Allow already seen posts"), default=True
-    )
-    reddit_upvotes_min = models.PositiveIntegerField(
-        _("Minimum amount of upvotes"), default=0
-    )
-    reddit_downvotes_max = models.PositiveIntegerField(
-        _("Maximum amount of downvotes"), blank=True, null=True
-    )
-    reddit_comments_min = models.PositiveIntegerField(
-        _("Minimum amount of comments"), default=0
-    )
-
-    # Twitter (legacy)
-    screen_name = models.CharField(max_length=255, blank=True, null=True)
-
     objects = CollectionRuleQuerySet.as_manager()
 
     def __str__(self):
@@ -75,22 +55,10 @@ class CollectionRule(TimeStampedModel):
 
     @property
     def update_url(self):
-        if self.type == RuleTypeChoices.subreddit:
-            return reverse("news:collection:subreddit-update", kwargs={"pk": self.pk})
-        elif self.type == RuleTypeChoices.twitter_timeline:
-            return "#not-supported"
-
         return reverse("news:collection:feed-update", kwargs={"pk": self.pk})
 
     @property
     def source_url(self):
-        if self.type == RuleTypeChoices.subreddit:
-            from newsreader.news.collection.reddit import REDDIT_API_URL, REDDIT_URL
-
-            return self.url.replace(REDDIT_API_URL, REDDIT_URL)
-        elif self.type == RuleTypeChoices.twitter_timeline:
-            return f"{settings.TWITTER_URL}/{self.screen_name}"
-
         return self.url
 
     @property
@ -1,419 +0,0 @@
|
||||||
import logging
|
|
||||||
|
|
||||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
||||||
from datetime import datetime, timedelta, timezone
|
|
||||||
from html import unescape
|
|
||||||
from json.decoder import JSONDecodeError
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
from uuid import uuid4
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.core.cache import cache
|
|
||||||
from django.utils.html import format_html
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from newsreader.news.collection.base import (
|
|
||||||
PostBuilder,
|
|
||||||
PostClient,
|
|
||||||
PostCollector,
|
|
||||||
PostStream,
|
|
||||||
Scheduler,
|
|
||||||
)
|
|
||||||
from newsreader.news.collection.choices import RuleTypeChoices
|
|
||||||
from newsreader.news.collection.exceptions import (
|
|
||||||
BuilderDuplicateException,
|
|
||||||
BuilderException,
|
|
||||||
BuilderMissingDataException,
|
|
||||||
BuilderParseException,
|
|
||||||
BuilderSkippedException,
|
|
||||||
StreamDeniedException,
|
|
||||||
StreamException,
|
|
||||||
StreamParseException,
|
|
||||||
StreamTooManyException,
|
|
||||||
)
|
|
||||||
from newsreader.news.collection.models import CollectionRule
|
|
||||||
from newsreader.news.collection.tasks import RedditTokenTask
|
|
||||||
from newsreader.news.collection.utils import fetch, post, truncate_text
|
|
||||||
from newsreader.news.core.models import Post
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
REDDIT_URL = "https://www.reddit.com"
|
|
||||||
REDDIT_API_URL = "https://oauth.reddit.com"
|
|
||||||
|
|
||||||
RATE_LIMIT = 60
|
|
||||||
RATE_LIMIT_DURATION = timedelta(seconds=60)
|
|
||||||
|
|
||||||
REDDIT_IMAGE_EXTENSIONS = (".jpg", ".png", ".gif")
|
|
||||||
REDDIT_VIDEO_EXTENSIONS = (".mp4", ".gifv", ".webm")
|
|
||||||
|
|
||||||
# see type prefixes on https://www.reddit.com/dev/api/
|
|
||||||
REDDIT_POST = "t3"
|
|
||||||
|
|
||||||
|
|
||||||
def get_reddit_authorization_url(user):
|
|
||||||
state = str(uuid4())
|
|
||||||
cache.set(f"{user.email}-reddit-auth", state)
|
|
||||||
|
|
||||||
params = {
|
|
||||||
"client_id": settings.REDDIT_CLIENT_ID,
|
|
||||||
"redirect_uri": settings.REDDIT_REDIRECT_URL,
|
|
||||||
"state": state,
|
|
||||||
"response_type": "code",
|
|
||||||
"duration": "permanent",
|
|
||||||
"scope": "identity,mysubreddits,save,read",
|
|
||||||
}
|
|
||||||
|
|
||||||
authorization_url = f"{REDDIT_URL}/api/v1/authorize"
|
|
||||||
return f"{authorization_url}?{urlencode(params)}"
|
|
||||||
|
|
||||||
|
|
||||||
def get_reddit_access_token(code, user):
|
|
||||||
client_auth = requests.auth.HTTPBasicAuth(
|
|
||||||
settings.REDDIT_CLIENT_ID, settings.REDDIT_CLIENT_SECRET
|
|
||||||
)
|
|
||||||
|
|
||||||
response = post(
|
|
||||||
f"{REDDIT_URL}/api/v1/access_token",
|
|
||||||
data={
|
|
||||||
"redirect_uri": settings.REDDIT_REDIRECT_URL,
|
|
||||||
"grant_type": "authorization_code",
|
|
||||||
"code": code,
|
|
||||||
},
|
|
||||||
auth=client_auth,
|
|
||||||
)
|
|
||||||
|
|
||||||
response_data = response.json()
|
|
||||||
|
|
||||||
user.reddit_access_token = response_data["access_token"]
|
|
||||||
user.reddit_refresh_token = response_data["refresh_token"]
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
cache.delete(f"{user.email}-reddit-auth")
|
|
||||||
|
|
||||||
return response_data["access_token"], response_data["refresh_token"]
|
|
||||||
|
|
||||||
|
|
||||||
# Note that the API always returns 204's with correct basic auth headers
|
|
||||||
def revoke_reddit_token(user):
|
|
||||||
client_auth = requests.auth.HTTPBasicAuth(
|
|
||||||
settings.REDDIT_CLIENT_ID, settings.REDDIT_CLIENT_SECRET
|
|
||||||
)
|
|
||||||
|
|
||||||
response = post(
|
|
||||||
f"{REDDIT_URL}/api/v1/revoke_token",
|
|
||||||
data={"token": user.reddit_refresh_token, "token_type_hint": "refresh_token"},
|
|
||||||
auth=client_auth,
|
|
||||||
)
|
|
||||||
|
|
||||||
return response.status_code == 204
|
|
||||||
|
|
||||||
|
|
||||||
class RedditBuilder(PostBuilder):
    rule_type = RuleTypeChoices.subreddit

    def build(self):
        results = {}

        if "data" not in self.payload or "children" not in self.payload["data"]:
            return

        entries = self.payload["data"]["children"]

        for entry in entries:
            try:
                post = self.build_post(entry)
            except BuilderDuplicateException:
                logger.warning("Skipping duplicate post")
                continue
            except BuilderSkippedException as e:
                logger.warning(e.message)
                continue
            except BuilderException:
                logger.exception("Failed building post")
                continue

            identifier = post.remote_identifier
            results[identifier] = post

        self.instances = results.values()

    def build_post(self, entry):
        rule = self.stream.rule
        entry_data = entry.get("data", {})
        remote_identifier = entry_data.get("id", "")
        kind = entry.get("kind")

        if remote_identifier in self.existing_posts:
            raise BuilderDuplicateException(payload=entry)
        elif kind != REDDIT_POST:
            raise BuilderParseException(
                message=f"Payload is not a Reddit post, it is of kind {kind}",
                payload=entry,
            )
        elif not entry_data:
            raise BuilderMissingDataException(
                message=f"Post {remote_identifier} did not contain any data",
                payload=entry,
            )
        try:
            title = entry_data["title"]
            author = entry_data["author"]

            post_url_fragment = entry_data["permalink"]
            direct_url = entry_data["url"]

            is_text = entry_data["is_self"]
            is_video = entry_data["is_video"]

            is_nsfw = entry_data["over_18"]
            is_spoiler = entry_data["spoiler"]
            is_viewed = entry_data["clicked"]
            upvotes = entry_data["ups"]
            downvotes = entry_data["downs"]
            comments = entry_data["num_comments"]
        except KeyError as e:
            raise BuilderMissingDataException(payload=entry) from e

        if not rule.reddit_allow_nfsw and is_nsfw:
            raise BuilderSkippedException("Rule does not allow NSFW posts")
        elif not rule.reddit_allow_spoiler and is_spoiler:
            raise BuilderSkippedException("Rule does not allow spoilers")
        elif not rule.reddit_allow_viewed and is_viewed:
            raise BuilderSkippedException("Post was already seen by user")
        elif not upvotes >= rule.reddit_upvotes_min:
            raise BuilderSkippedException(
                "Post does not meet minimum amount of upvotes"
            )
        elif (
            rule.reddit_downvotes_max is not None
            and downvotes > rule.reddit_downvotes_max
        ):
            raise BuilderSkippedException("Post has more downvotes than allowed")
        elif not comments >= rule.reddit_comments_min:
            raise BuilderSkippedException("Post does not have enough comments")

        title = truncate_text(Post, "title", title)
        author = truncate_text(Post, "author", author)

        if is_text:
            body = self.get_text_post(entry_data)
        elif direct_url.endswith(REDDIT_IMAGE_EXTENSIONS):
            body = self.get_image_post(title, direct_url)
        elif is_video:
            body = self.get_native_video_post(entry_data)
        elif direct_url.endswith(REDDIT_VIDEO_EXTENSIONS):
            body = self.get_video_post(direct_url)
        else:
            body = self.get_url_post(title, direct_url)

        try:
            _created_date = datetime.fromtimestamp(entry_data["created_utc"])
            created_date = _created_date.replace(tzinfo=timezone.utc)
        except (OverflowError, OSError) as e:
            raise BuilderParseException(payload=entry) from e
        except KeyError as e:
            raise BuilderMissingDataException(payload=entry) from e

        post_entry = {
            "remote_identifier": remote_identifier,
            "title": title,
            "body": body,
            "author": author,
            "url": f"{REDDIT_URL}{post_url_fragment}",
            "publication_date": created_date,
            "rule": rule,
        }

        return Post(**post_entry)

    def get_text_post(self, entry):
        try:
            uncleaned_body = entry["selftext_html"]
        except KeyError as e:
            raise BuilderMissingDataException(payload=entry) from e

        unescaped_body = unescape(uncleaned_body) if uncleaned_body else ""
        return self.sanitize_fragment(unescaped_body) if unescaped_body else ""

    def get_image_post(self, title, url):
        return format_html(
            "<div><img alt='{title}' src='{url}' loading='lazy' /></div>",
            url=url,
            title=title,
        )

    def get_native_video_post(self, entry):
        try:
            video_info = entry["secure_media"]["reddit_video"]
        except KeyError as e:
            raise BuilderMissingDataException(payload=entry) from e

        return format_html(
            "<div><video controls muted><source src='{url}' type='video/mp4' /></video></div>",
            url=video_info["fallback_url"],
        )

    def get_video_post(self, url):
        extension = next(
            extension.replace(".", "")
            for extension in REDDIT_VIDEO_EXTENSIONS
            if url.endswith(extension)
        )

        if extension == "gifv":
            return format_html(
                "<div><video controls muted><source src='{url}' type='video/mp4' /></video></div>",
                url=url.replace(extension, "mp4"),
            )

        return format_html(
            "<div><video controls muted><source src='{url}' type='video/{extension}' /></video></div>",
            url=url,
            extension=extension,
        )

    def get_url_post(self, title, url):
        return format_html(
            "<div><a target='_blank' rel='noopener noreferrer' alt='{title}' href='{url}' class='link'>Direct url</a></div>",
            url=url,
            title=title,
        )

    def __str__(self):
        return f"{self.stream.rule.pk}: RedditBuilder"
class RedditStream(PostStream):
    rule_type = RuleTypeChoices.subreddit
    headers = {}

    def __init__(self, rule):
        super().__init__(rule)

        self.headers = {"Authorization": f"bearer {self.rule.user.reddit_access_token}"}

    def read(self):
        response = fetch(self.rule.url, headers=self.headers)

        return self.parse(response), self

    def parse(self, response):
        try:
            return response.json()
        except JSONDecodeError as e:
            raise StreamParseException(
                response=response, message="Failed parsing json"
            ) from e

    def __str__(self):
        return f"{self.rule.pk}: RedditStream"
class RedditClient(PostClient):
    stream = RedditStream

    def __enter__(self):
        streams = [[self.stream(rule) for rule in batch] for batch in self.rules]
        rate_limited = False

        with ThreadPoolExecutor(max_workers=10) as executor:
            for batch in streams:
                futures = {executor.submit(stream.read): stream for stream in batch}

                if rate_limited:
                    logger.warning("Aborting requests, rate limit hit")
                    break

                for future in as_completed(futures):
                    stream = futures[future]

                    try:
                        response_data = future.result()

                        stream.rule.error = None
                        stream.rule.succeeded = True

                        yield response_data
                    except StreamDeniedException as e:
                        logger.warning(
                            f"Access token expired for user {stream.rule.user.pk}"
                        )

                        stream.rule.user.reddit_access_token = None
                        stream.rule.user.save()

                        self.set_rule_error(stream.rule, e)

                        RedditTokenTask.delay(stream.rule.user.pk)

                        break
                    except StreamTooManyException as e:
                        logger.exception("Rate limit hit, aborting batched subreddits")

                        self.set_rule_error(stream.rule, e)

                        rate_limited = True
                        break
                    except StreamException as e:
                        logger.exception(
                            f"Stream failed reading content from {stream.rule.url}"
                        )

                        self.set_rule_error(stream.rule, e)

                        continue
                    finally:
                        stream.rule.last_run = datetime.now(tz=timezone.utc)
                        stream.rule.save()
class RedditCollector(PostCollector):
    builder = RedditBuilder
    client = RedditClient


class RedditScheduler(Scheduler):
    max_amount = RATE_LIMIT
    max_user_amount = RATE_LIMIT / 4

    def __init__(self, subreddits=[]):
        if not subreddits:
            self.subreddits = CollectionRule.objects.filter(
                type=RuleTypeChoices.subreddit,
                user__reddit_access_token__isnull=False,
                user__reddit_refresh_token__isnull=False,
                enabled=True,
            ).order_by("last_run")[:200]
        else:
            self.subreddits = subreddits

    def get_scheduled_rules(self):
        rule_mapping = {}
        current_amount = 0

        for subreddit in self.subreddits:
            user_pk = subreddit.user.pk

            if current_amount == self.max_amount:
                break

            if user_pk in rule_mapping:
                max_amount_reached = len(rule_mapping[user_pk]) == self.max_user_amount

                if max_amount_reached:
                    continue

                rule_mapping[user_pk].append(subreddit)
                current_amount += 1

                continue

            rule_mapping[user_pk] = [subreddit]
            current_amount += 1

        return list(rule_mapping.values())
@ -1,19 +1,13 @@
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail import send_mail
from django.utils.translation import gettext as _

import requests

from celery.exceptions import Reject
from celery.utils.log import get_task_logger

from newsreader.accounts.models import User
from newsreader.celery import app
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.exceptions.stream import StreamException
from newsreader.news.collection.feed import FeedCollector
from newsreader.news.collection.utils import post
from newsreader.utils.celery import MemCacheLock

@ -49,84 +43,6 @@ class FeedTask(app.Task):
                raise Reject(reason="Task already running", requeue=False)


class RedditTask(app.Task):
    name = "RedditTask"
    ignore_result = True

    def run(self):
        from newsreader.news.collection.reddit import RedditCollector, RedditScheduler

        with MemCacheLock("reddit-task", self.app.oid) as acquired:
            if acquired:
                logger.info("Running reddit task")

                scheduler = RedditScheduler()
                subreddits = scheduler.get_scheduled_rules()

                collector = RedditCollector()
                collector.collect(rules=subreddits)
            else:
                logger.warning("Cancelling task due to existing lock")

                raise Reject(reason="Task already running", requeue=False)


class RedditTokenTask(app.Task):
    name = "RedditTokenTask"
    ignore_result = True

    def run(self, user_pk):
        from newsreader.news.collection.reddit import REDDIT_URL

        try:
            user = User.objects.get(pk=user_pk)
        except ObjectDoesNotExist:
            message = f"User {user_pk} does not exist"
            logger.exception(message)

            raise Reject(reason=message, requeue=False)

        if not user.reddit_refresh_token:
            raise Reject(reason=f"User {user_pk} has no refresh token", requeue=False)

        client_auth = requests.auth.HTTPBasicAuth(
            settings.REDDIT_CLIENT_ID, settings.REDDIT_CLIENT_SECRET
        )

        try:
            response = post(
                f"{REDDIT_URL}/api/v1/access_token",
                data={
                    "grant_type": "refresh_token",
                    "refresh_token": user.reddit_refresh_token,
                },
                auth=client_auth,
            )
        except StreamException:
            logger.exception(
                f"Failed refreshing reddit access token for user {user_pk}"
            )

            user.reddit_refresh_token = None
            user.save()

            message = _(
                "Your Reddit account credentials have expired. Re-authenticate in"
                " the settings page to keep retrieving Reddit specific information"
                " from your account."
            )

            send_mail(
                "Reddit account needs re-authentication", message, None, [user.email]
            )
            return

        response_data = response.json()

        user.reddit_access_token = response_data["access_token"]
        user.save()


class FaviconTask(app.Task):
    name = "FaviconTask"
    ignore_result = True

@ -150,15 +66,6 @@ class FaviconTask(app.Task):

                collector = FaviconCollector()
                collector.collect(rules=rules)

                third_party_rules = user.rules.enabled().exclude(
                    type=RuleTypeChoices.feed
                )

                for rule in third_party_rules:
                    if rule.type == RuleTypeChoices.subreddit:
                        rule.favicon = "https://www.reddit.com/favicon.ico"
                        rule.save()
            else:
                logger.warning("Cancelling task due to existing lock")

@ -167,5 +74,3 @@ class FaviconTask(app.Task):

FeedTask = app.register_task(FeedTask())
FaviconTask = app.register_task(FaviconTask())
RedditTask = app.register_task(RedditTask())
RedditTokenTask = app.register_task(RedditTokenTask())
@ -2,139 +2,129 @@
|
||||||
{% load i18n static filters %}
|
{% load i18n static filters %}
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<main id="rules--page" class="main" data-render-sidebar=true>
|
<main id="rules--page" class="main" data-render-sidebar=true>
|
||||||
<div class="main__container">
|
<div class="main__container">
|
||||||
<form class="form rules-form">
|
<form class="form rules-form">
|
||||||
{% csrf_token %}
|
{% csrf_token %}
|
||||||
|
|
||||||
<section class="section form__section form__section--actions">
|
<section class="section form__section form__section--actions">
|
||||||
<div class="form__actions">
|
<div class="form__actions">
|
||||||
<a class="link button button--confirm" href="{% url "news:collection:feed-create" %}">{% trans "Add a feed" %}</a>
|
<a class="link button button--confirm" href="{% url " news:collection:feed-create" %}">{% trans "Add a feed"
|
||||||
<a class="link button button--confirm" href="{% url "news:collection:import" %}">{% trans "Import feeds" %}</a>
|
%}</a>
|
||||||
<a class="link button button--reddit" href="{% url "news:collection:subreddit-create" %}">{% trans "Add a subreddit" %}</a>
|
<a class="link button button--confirm" href="{% url " news:collection:import" %}">{% trans "Import feeds"
|
||||||
</div>
|
%}</a>
|
||||||
</section>
|
|
||||||
|
|
||||||
<section class="section form__section form__section--actions">
|
|
||||||
<fieldset class="fieldset form__fieldset">
|
|
||||||
<input type="submit" class="button button--primary" formaction="{% url "news:collection:rules-enable" %}" formmethod="post" value="{% trans "Enable" %}" />
|
|
||||||
<input type="submit" class="button button--primary" formaction="{% url "news:collection:rules-disable" %}" formmethod="post" value="{% trans "Disable" %}" />
|
|
||||||
<input type="submit" class="button button--error" formaction="{% url "news:collection:rules-delete" %}" formmethod="post" value="{% trans "Delete" %}"/>
|
|
||||||
</fieldset>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section class="section form__section">
|
|
||||||
<table class="table rules-table" border="0" cellspacing="0">
|
|
||||||
<thead class="table__header rules-table__header">
|
|
||||||
<tr class="table__row rules-table__row">
|
|
||||||
<th class="table__heading rules-table__heading--select">
|
|
||||||
{% include "components/form/checkbox.html" with id="select-all" data_input="rules" id_for_label="select-all" %}
|
|
||||||
</th>
|
|
||||||
<th class="table__heading rules-table__heading rules-table__heading--name">{% trans "Name" %}</th>
|
|
||||||
<th class="table__heading rules-table__heading rules-table__heading--category">{% trans "Category" %}</th>
|
|
||||||
<th class="table__heading rules-table__heading rules-table__heading--url">{% trans "URL" %}</th>
|
|
||||||
<th class="table__heading rules-table__heading rules-table__heading--succeeded">{% trans "Successfully ran" %}</th>
|
|
||||||
<th class="table__heading rules-table__heading rules-table__heading--enabled">{% trans "Enabled" %}</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody class="table__body rules-table__body">
|
|
||||||
{% for rule in rules %}
|
|
||||||
<tr class="table__row {% if rule.failed %}table__row--error{% endif %} rules-table__row">
|
|
||||||
<td class="table__item rules-table__item--select">
|
|
||||||
{% with rule|id_for_label:"rules" as id_for_label %}
|
|
||||||
{% include "components/form/checkbox.html" with name="rules" value=rule.pk id=id_for_label id_for_label=id_for_label %}
|
|
||||||
{% endwith %}
|
|
||||||
</td>
|
|
||||||
|
|
||||||
<td
|
|
||||||
class="table__item rules-table__item rules-table__item--name"
|
|
||||||
title="{{ rule.name }}"
|
|
||||||
>
|
|
||||||
<a class="link" href="{{ rule.update_url }}">
|
|
||||||
{{ rule.name }}
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
|
|
||||||
<td
|
|
||||||
class="table__item rules-table__item rules-table__item--category"
|
|
||||||
title="{{ rule.category.name }}"
|
|
||||||
>
|
|
||||||
{% if rule.category %}
|
|
||||||
<a
|
|
||||||
class="link"
|
|
||||||
href="{% url 'news:core:category-update' pk=rule.category.pk %}"
|
|
||||||
>
|
|
||||||
{{ rule.category.name }}
|
|
||||||
</a>
|
|
||||||
{% endif %}
|
|
||||||
</td>
|
|
||||||
|
|
||||||
<td
|
|
||||||
class="table__item rules-table__item rules-table__item--url"
|
|
||||||
title="{{ rule.source_url }}"
|
|
||||||
>
|
|
||||||
<a
|
|
||||||
class="link"
|
|
||||||
href="{{ rule.source_url }}"
|
|
||||||
target="_blank"
|
|
||||||
rel="noopener noreferrer"
|
|
||||||
>
|
|
||||||
{{ rule.source_url }}
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
|
|
||||||
<td class="table__item rules-table__item rules-table__item--failed">
|
|
||||||
{% if rule.failed %}
|
|
||||||
<i class="fas fa-exclamation-triangle"></i>
|
|
||||||
{% else %}
|
|
||||||
<i class="fas fa-check"></i>
|
|
||||||
{% endif %}
|
|
||||||
</td>
|
|
||||||
|
|
||||||
<td class="table__item rules-table__item rules-table__item--enabled">
|
|
||||||
{% if rule.enabled %}
|
|
||||||
<i class="fas fa-check"></i>
|
|
||||||
{% else %}
|
|
||||||
<i class="fas fa-pause"></i>
|
|
||||||
{% endif %}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
{% endfor %}
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</section>
|
|
||||||
</form>
|
|
||||||
|
|
||||||
<div class="table__footer">
|
|
||||||
<div class="pagination">
|
|
||||||
<span class="pagination__previous">
|
|
||||||
{% if page_obj.has_previous %}
|
|
||||||
<a class="link button" href="?page=1">{% trans "first" %}</a>
|
|
||||||
<a class="link button" href="?page={{ page_obj.previous_page_number }}">
|
|
||||||
{% trans "previous" %}
|
|
||||||
</a>
|
|
||||||
{% endif %}
|
|
||||||
</span>
|
|
||||||
|
|
||||||
<span class="pagination__current">
|
|
||||||
{% blocktrans with current_number=page_obj.number total_count=page_obj.paginator.num_pages %}
|
|
||||||
Page {{ current_number }} of {{ total_count }}
|
|
||||||
{% endblocktrans %}
|
|
||||||
</span>
|
|
||||||
|
|
||||||
<span class="pagination__next">
|
|
||||||
{% if page_obj.has_next %}
|
|
||||||
<a class="link button" href="?page={{ page_obj.next_page_number }}">
|
|
||||||
{% trans "next" %}
|
|
||||||
</a>
|
|
||||||
|
|
||||||
<a class="link button" href="?page={{ page_obj.paginator.num_pages }}">
|
|
||||||
{% trans "last" %}
|
|
||||||
</a>
|
|
||||||
{% endif %}
|
|
||||||
</span>
|
|
||||||
</div>
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section class="section form__section form__section--actions">
|
||||||
|
<fieldset class="fieldset form__fieldset">
|
||||||
|
<input type="submit" class="button button--primary" formaction="{% url " news:collection:rules-enable" %}"
|
||||||
|
formmethod="post" value="{% trans " Enable" %}" />
|
||||||
|
<input type="submit" class="button button--primary" formaction="{% url " news:collection:rules-disable" %}"
|
||||||
|
formmethod="post" value="{% trans " Disable" %}" />
|
||||||
|
<input type="submit" class="button button--error" formaction="{% url " news:collection:rules-delete" %}"
|
||||||
|
formmethod="post" value="{% trans " Delete" %}" />
|
||||||
|
</fieldset>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section class="section form__section">
|
||||||
|
<table class="table rules-table" border="0" cellspacing="0">
|
||||||
|
<thead class="table__header rules-table__header">
|
||||||
|
<tr class="table__row rules-table__row">
|
||||||
|
<th class="table__heading rules-table__heading--select">
|
||||||
|
{% include "components/form/checkbox.html" with id="select-all" data_input="rules"
|
||||||
|
id_for_label="select-all" %}
|
||||||
|
</th>
|
||||||
|
<th class="table__heading rules-table__heading rules-table__heading--name">{% trans "Name" %}</th>
|
||||||
|
<th class="table__heading rules-table__heading rules-table__heading--category">{% trans "Category" %}</th>
|
||||||
|
<th class="table__heading rules-table__heading rules-table__heading--url">{% trans "URL" %}</th>
|
||||||
|
<th class="table__heading rules-table__heading rules-table__heading--succeeded">{% trans "Successfully ran"
|
||||||
|
%}</th>
|
||||||
|
<th class="table__heading rules-table__heading rules-table__heading--enabled">{% trans "Enabled" %}</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody class="table__body rules-table__body">
|
||||||
|
{% for rule in rules %}
|
||||||
|
<tr class="table__row {% if rule.failed %}table__row--error{% endif %} rules-table__row">
|
||||||
|
<td class="table__item rules-table__item--select">
|
||||||
|
{% with rule|id_for_label:"rules" as id_for_label %}
|
||||||
|
{% include "components/form/checkbox.html" with name="rules" value=rule.pk id=id_for_label
|
||||||
|
id_for_label=id_for_label %}
|
||||||
|
{% endwith %}
|
||||||
|
</td>
|
||||||
|
|
||||||
|
<td class="table__item rules-table__item rules-table__item--name" title="{{ rule.name }}">
|
||||||
|
<a class="link" href="{{ rule.update_url }}">
|
||||||
|
{{ rule.name }}
|
||||||
|
</a>
|
||||||
|
</td>
|
||||||
|
|
||||||
|
<td class="table__item rules-table__item rules-table__item--category" title="{{ rule.category.name }}">
|
||||||
|
{% if rule.category %}
|
||||||
|
<a class="link" href="{% url 'news:core:category-update' pk=rule.category.pk %}">
|
||||||
|
{{ rule.category.name }}
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
|
||||||
|
<td class="table__item rules-table__item rules-table__item--url" title="{{ rule.source_url }}">
|
||||||
|
<a class="link" href="{{ rule.source_url }}" target="_blank" rel="noopener noreferrer">
|
||||||
|
{{ rule.source_url }}
|
||||||
|
</a>
|
||||||
|
</td>
|
||||||
|
|
||||||
|
<td class="table__item rules-table__item rules-table__item--failed">
|
||||||
|
{% if rule.failed %}
|
||||||
|
<i class="fas fa-exclamation-triangle"></i>
|
||||||
|
{% else %}
|
||||||
|
<i class="fas fa-check"></i>
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
|
||||||
|
<td class="table__item rules-table__item rules-table__item--enabled">
|
||||||
|
{% if rule.enabled %}
|
||||||
|
<i class="fas fa-check"></i>
|
||||||
|
{% else %}
|
||||||
|
<i class="fas fa-pause"></i>
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</section>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
<div class="table__footer">
|
||||||
|
<div class="pagination">
|
||||||
|
<span class="pagination__previous">
|
||||||
|
{% if page_obj.has_previous %}
|
||||||
|
<a class="link button" href="?page=1">{% trans "first" %}</a>
|
||||||
|
<a class="link button" href="?page={{ page_obj.previous_page_number }}">
|
||||||
|
{% trans "previous" %}
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
</span>
|
||||||
|
|
||||||
|
<span class="pagination__current">
|
||||||
|
{% blocktrans with current_number=page_obj.number total_count=page_obj.paginator.num_pages %}
|
||||||
|
Page {{ current_number }} of {{ total_count }}
|
||||||
|
{% endblocktrans %}
|
||||||
|
</span>
|
||||||
|
|
||||||
|
<span class="pagination__next">
|
||||||
|
{% if page_obj.has_next %}
|
||||||
|
<a class="link button" href="?page={{ page_obj.next_page_number }}">
|
||||||
|
{% trans "next" %}
|
||||||
|
</a>
|
||||||
|
|
||||||
|
<a class="link button" href="?page={{ page_obj.paginator.num_pages }}">
|
||||||
|
{% trans "last" %}
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
</span>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</main>
|
</div>
|
||||||
|
</main>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
|
||||||
|
|
@ -1,13 +0,0 @@
{% extends "sidebar.html" %}
{% load static %}


{% block content %}
  {% url "news:collection:rules" as cancel_url %}

  <main id="subreddit--page" class="main" data-render-sidebar=true>
    <div class="main__container">
      {% include "components/form/form.html" with form=form title="Add a subreddit" cancel_url=cancel_url confirm_text="Add subreddit" %}
    </div>
  </main>
{% endblock %}

@ -1,14 +0,0 @@
{% extends "base.html" %}
{% load static i18n %}

{% block content %}
  <main id="subreddit--page" class="main">
    {% if subreddit.error %}
      {% trans "Failed to retrieve posts" as title %}
      {% include "components/textbox/textbox.html" with title=title body=subreddit.error class="text-section--error" only %}
    {% endif %}

    {% url "news:collection:rules" as cancel_url %}
    {% include "components/form/form.html" with form=form title="Update subreddit" cancel_url=cancel_url confirm_text="Save subreddit" %}
  </main>
{% endblock %}
@ -3,7 +3,6 @@ import factory
from newsreader.accounts.tests.factories import UserFactory
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.models import CollectionRule
from newsreader.news.collection.reddit import REDDIT_URL


class CollectionRuleFactory(factory.django.DjangoModelFactory):

@ -23,8 +22,3 @@ class CollectionRuleFactory(factory.django.DjangoModelFactory):

class FeedFactory(CollectionRuleFactory):
    type = RuleTypeChoices.feed


class SubredditFactory(CollectionRuleFactory):
    type = RuleTypeChoices.subreddit
    website_url = REDDIT_URL
File diff suppressed because it is too large
|
|
@ -1,472 +0,0 @@
|
||||||
from datetime import datetime, timezone
|
|
||||||
from unittest.mock import Mock
|
|
||||||
|
|
||||||
from django.test import TestCase
|
|
||||||
|
|
||||||
from newsreader.news.collection.reddit import RedditBuilder
|
|
||||||
from newsreader.news.collection.tests.factories import SubredditFactory
|
|
||||||
from newsreader.news.collection.tests.reddit.builder.mocks import (
|
|
||||||
author_mock,
|
|
||||||
comment_mock,
|
|
||||||
downvote_mock,
|
|
||||||
duplicate_mock,
|
|
||||||
empty_mock,
|
|
||||||
external_gifv_mock,
|
|
||||||
external_image_mock,
|
|
||||||
external_video_mock,
|
|
||||||
image_mock,
|
|
||||||
nsfw_mock,
|
|
||||||
seen_mock,
|
|
||||||
simple_mock,
|
|
||||||
spoiler_mock,
|
|
||||||
title_mock,
|
|
||||||
unknown_mock,
|
|
||||||
unsanitized_mock,
|
|
||||||
upvote_mock,
|
|
||||||
video_mock,
|
|
||||||
)
|
|
||||||
from newsreader.news.core.models import Post
|
|
||||||
from newsreader.news.core.tests.factories import RedditPostFactory
|
|
||||||
|
|
||||||
|
|
||||||
class RedditBuilderTestCase(TestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.maxDiff = None
|
|
||||||
|
|
||||||
def test_simple_mock(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(simple_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertCountEqual(
|
|
||||||
("hm0qct", "hna75r", "hngs71", "hngsj8", "hnd7cy"), posts.keys()
|
|
||||||
)
|
|
||||||
|
|
||||||
post = posts["hm0qct"]
|
|
||||||
|
|
||||||
self.assertEqual(post.rule, subreddit)
|
|
||||||
self.assertEqual(
|
|
||||||
post.title,
|
|
||||||
"Linux Experiences/Rants or Education/Certifications thread - July 06, 2020",
|
|
||||||
)
|
|
||||||
self.assertIn(
|
|
||||||
" This megathread is also to hear opinions from anyone just starting out"
|
|
||||||
" with Linux or those that have used Linux (GNU or otherwise) for a long",
|
|
||||||
post.body,
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertIn(
|
|
||||||
"<p>For those looking for certifications please use this megathread to ask about how"
|
|
||||||
" to get certified whether it's for the business world or for your own satisfaction."
|
|
||||||
' Be sure to check out <a href="/r/linuxadmin">r/linuxadmin</a> for more discussion in the'
|
|
||||||
" SysAdmin world!</p>",
|
|
||||||
post.body,
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(post.author, "AutoModerator")
|
|
||||||
self.assertEqual(
|
|
||||||
post.url,
|
|
||||||
"https://www.reddit.com/r/linux/comments/hm0qct/linux_experiencesrants_or_educationcertifications/",
|
|
||||||
)
|
|
||||||
self.assertEqual(
|
|
||||||
post.publication_date, datetime(2020, 7, 6, 6, 11, 22, tzinfo=timezone.utc)
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_empty_data(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(empty_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 0)
|
|
||||||
|
|
||||||
def test_unknown_mock(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(unknown_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 0)
|
|
||||||
|
|
||||||
def test_html_sanitizing(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(unsanitized_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertCountEqual(("hnd7cy",), posts.keys())
|
|
||||||
|
|
||||||
post = posts["hnd7cy"]
|
|
||||||
|
|
||||||
self.assertEqual(post.body, "<article></article>")
|
|
||||||
|
|
||||||
def test_long_author_text_is_truncated(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(author_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertCountEqual(("hnd7cy",), posts.keys())
|
|
||||||
|
|
||||||
post = posts["hnd7cy"]
|
|
||||||
|
|
||||||
self.assertEqual(post.author, "TheQuantumZeroTheQuantumZeroTheQuantumZ…")
|
|
||||||
|
|
||||||
def test_long_title_text_is_truncated(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(title_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertCountEqual(("hnd7cy",), posts.keys())
|
|
||||||
|
|
||||||
post = posts["hnd7cy"]
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
post.title,
|
|
||||||
'Board statement on the LibreOffice 7.0 RC "Personal EditionBoard statement on the LibreOffice 7.0 RC "Personal Edition" label" labelBoard statement on the LibreOffice 7.0 RC "PersBoard statement on t…',
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_duplicate_in_response(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(duplicate_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 2)
|
|
||||||
self.assertCountEqual(("hm0qct", "hna75r"), posts.keys())
|
|
||||||
|
|
||||||
def test_duplicate_in_database(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
RedditPostFactory(remote_identifier="hm0qct", rule=subreddit, title="foo")
|
|
||||||
|
|
||||||
with builder(simple_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 5)
|
|
||||||
self.assertCountEqual(
|
|
||||||
("hm0qct", "hna75r", "hngs71", "hngsj8", "hnd7cy"), posts.keys()
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_image_post(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(image_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertCountEqual(("hr64xh", "hr4bxo", "hr14y5", "hr2fv0"), posts.keys())
|
|
||||||
|
|
||||||
post = posts["hr64xh"]
|
|
||||||
|
|
||||||
title = (
|
|
||||||
"Ya’ll, I just can’t... this is my "
|
|
||||||
"son, Judah. My wife and I have no "
|
|
||||||
"idea how we created such a "
|
|
||||||
"beautiful child."
|
|
||||||
)
|
|
||||||
url = "https://i.redd.it/cm2qybia1va51.jpg"
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
"https://www.reddit.com/r/aww/comments/hr64xh/yall_i_just_cant_this_is_my_son_judah_my_wife_and/",
|
|
||||||
post.url,
|
|
||||||
)
|
|
||||||
self.assertEqual(
|
|
||||||
f"<div><img alt='{title}' src='{url}' loading='lazy' /></div>", post.body
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_external_image_post(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(external_image_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertCountEqual(("hr41am", "huoldn"), posts.keys())
|
|
||||||
|
|
||||||
post = posts["hr41am"]
|
|
||||||
|
|
||||||
url = "http://gfycat.com/thatalivedogwoodclubgall"
|
|
||||||
title = "Excited cows have a new brush!"
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
f"<div><a target='_blank' rel='noopener noreferrer' alt='{title}' href='{url}' class='link'>Direct url</a></div>",
|
|
||||||
post.body,
|
|
||||||
)
|
|
||||||
self.assertEqual(
|
|
||||||
"https://www.reddit.com/r/aww/comments/hr41am/excited_cows_have_a_new_brush/",
|
|
||||||
post.url,
|
|
||||||
)
|
|
||||||
|
|
||||||
post = posts["huoldn"]
|
|
||||||
|
|
||||||
url = "https://i.imgur.com/usfMVUJ.jpg"
|
|
||||||
title = "Novosibirsk Zoo welcomes 16 cobalt-eyed Pallas’s cat kittens"
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
f"<div><img alt='{title}' src='{url}' loading='lazy' /></div>", post.body
|
|
||||||
)
|
|
||||||
self.assertEqual(
|
|
||||||
"https://www.reddit.com/r/aww/comments/huoldn/novosibirsk_zoo_welcomes_16_cobalteyed_pallass/",
|
|
||||||
post.url,
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_video_post(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(video_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertCountEqual(("hr32jf", "hr1r00", "hqy0ny", "hr0uzh"), posts.keys())
|
|
||||||
|
|
||||||
post = posts["hr1r00"]
|
|
||||||
|
|
||||||
url = "https://v.redd.it/eyvbxaeqtta51/DASH_480.mp4?source=fallback"
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
post.url,
|
|
||||||
"https://www.reddit.com/r/aww/comments/hr1r00/cool_catt_and_his_clingy_girlfriend/",
|
|
||||||
)
|
|
||||||
self.assertEqual(
|
|
||||||
f"<div><video controls muted><source src='{url}' type='video/mp4' /></video></div>",
|
|
||||||
post.body,
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_external_video_post(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(external_video_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
post = Post.objects.get()
|
|
||||||
|
|
||||||
self.assertEqual(post.remote_identifier, "hulh8k")
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
post.url,
|
|
||||||
"https://www.reddit.com/r/aww/comments/hulh8k/dog_splashing_in_water/",
|
|
||||||
)
|
|
||||||
|
|
||||||
title = "Dog splashing in water"
|
|
||||||
url = "https://gfycat.com/excellentinfantileamericanwigeon"
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
f"<div><a target='_blank' rel='noopener noreferrer' alt='{title}' href='{url}' class='link'>Direct url</a></div>",
|
|
||||||
post.body,
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_external_gifv_video_post(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(external_gifv_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
post = Post.objects.get()
|
|
||||||
|
|
||||||
self.assertEqual(post.remote_identifier, "humdlf")
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
post.url, "https://www.reddit.com/r/aww/comments/humdlf/if_i_fits_i_sits/"
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
"<div><video controls muted><source src='https://i.imgur.com/grVh2AG.mp4' type='video/mp4' /></video></div>",
|
|
||||||
post.body,
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_link_only_post(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(simple_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
post = Post.objects.get(remote_identifier="hngsj8")
|
|
||||||
|
|
||||||
title = "KeePassXC 2.6.0 released"
|
|
||||||
url = "https://keepassxc.org/blog/2020-07-07-2.6.0-released/"
|
|
||||||
|
|
||||||
self.assertIn(
|
|
||||||
f"<div><a target='_blank' rel='noopener noreferrer' alt='{title}' href='{url}' class='link'>Direct url</a></div>",
|
|
||||||
post.body,
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
post.url,
|
|
||||||
"https://www.reddit.com/r/linux/comments/hngsj8/keepassxc_260_released/",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_skip_not_known_post_type(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(unknown_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 0)
|
|
||||||
|
|
||||||
def test_nsfw_not_allowed(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory(reddit_allow_nfsw=False)
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(nsfw_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 1)
|
|
||||||
self.assertCountEqual(("hna75r",), posts.keys())
|
|
||||||
|
|
||||||
def test_spoiler_not_allowed(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory(reddit_allow_spoiler=False)
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(spoiler_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 1)
|
|
||||||
self.assertCountEqual(("hm0qct",), posts.keys())
|
|
||||||
|
|
||||||
def test_already_seen_not_allowed(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory(reddit_allow_viewed=False)
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(seen_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 1)
|
|
||||||
self.assertCountEqual(("hna75r",), posts.keys())
|
|
||||||
|
|
||||||
def test_upvote_minimum(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory(reddit_upvotes_min=100)
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(upvote_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 1)
|
|
||||||
self.assertCountEqual(("hna75r",), posts.keys())
|
|
||||||
|
|
||||||
def test_comments_minimum(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory(reddit_comments_min=100)
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(comment_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 1)
|
|
||||||
self.assertCountEqual(("hm0qct",), posts.keys())
|
|
||||||
|
|
||||||
def test_downvote_maximum(self):
|
|
||||||
builder = RedditBuilder
|
|
||||||
|
|
||||||
subreddit = SubredditFactory(reddit_downvotes_max=20)
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
with builder(downvote_mock, mock_stream) as builder:
|
|
||||||
builder.build()
|
|
||||||
builder.save()
|
|
||||||
|
|
||||||
posts = {post.remote_identifier: post for post in Post.objects.all()}
|
|
||||||
|
|
||||||
self.assertEqual(Post.objects.count(), 1)
|
|
||||||
self.assertCountEqual(("hm0qct",), posts.keys())
|
|
||||||
|
|
@ -1,160 +0,0 @@
|
||||||
# Note that some response data is truncated
|
|
||||||
|
|
||||||
simple_mock = {
|
|
||||||
"data": {
|
|
||||||
"after": "t3_hjywyf",
|
|
||||||
"before": None,
|
|
||||||
"children": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"approved_at_utc": None,
|
|
||||||
"approved_by": None,
|
|
||||||
"archived": False,
|
|
||||||
"author": "AutoModerator",
|
|
||||||
"banned_at_utc": None,
|
|
||||||
"banned_by": None,
|
|
||||||
"category": None,
|
|
||||||
"content_categories": None,
|
|
||||||
"created": 1593605471.0,
|
|
||||||
"created_utc": 1593576671.0,
|
|
||||||
"discussion_type": None,
|
|
||||||
"distinguished": "moderator",
|
|
||||||
"domain": "self.linux",
|
|
||||||
"edited": False,
|
|
||||||
"hidden": False,
|
|
||||||
"id": "hj34ck",
|
|
||||||
"locked": False,
|
|
||||||
"name": "t3_hj34ck",
|
|
||||||
"permalink": "/r/linux/comments/hj34ck/weekly_questions_and_hardware_thread_july_01_2020/",
|
|
||||||
"pinned": False,
|
|
||||||
"selftext": "Welcome to r/linux! If you're "
|
|
||||||
"new to Linux or trying to get "
|
|
||||||
"started this thread is for you. "
|
|
||||||
"Get help here or as always, "
|
|
||||||
"check out r/linuxquestions or "
|
|
||||||
"r/linux4noobs\n"
|
|
||||||
"\n"
|
|
||||||
"This megathread is for all your "
|
|
||||||
"question needs. As we don't "
|
|
||||||
"allow questions on r/linux "
|
|
||||||
"outside of this megathread, "
|
|
||||||
"please consider using "
|
|
||||||
"r/linuxquestions or "
|
|
||||||
"r/linux4noobs for the best "
|
|
||||||
"solution to your problem.\n"
|
|
||||||
"\n"
|
|
||||||
"Ask your hardware requests here "
|
|
||||||
"too or try r/linuxhardware!",
|
|
||||||
"selftext_html": "<!-- SC_OFF "
|
|
||||||
"--><div "
|
|
||||||
'class="md"><p>Welcome '
|
|
||||||
"to <a "
|
|
||||||
'href="/r/linux">r/linux</a>! '
|
|
||||||
"If you&#39;re new to "
|
|
||||||
"Linux or trying to get "
|
|
||||||
"started this thread is for "
|
|
||||||
"you. Get help here or as "
|
|
||||||
"always, check out <a "
|
|
||||||
'href="/r/linuxquestions">r/linuxquestions</a> '
|
|
||||||
"or <a "
|
|
||||||
'href="/r/linux4noobs">r/linux4noobs</a></p>\n'
|
|
||||||
"\n"
|
|
||||||
"<p>This megathread is "
|
|
||||||
"for all your question "
|
|
||||||
"needs. As we don&#39;t "
|
|
||||||
"allow questions on <a "
|
|
||||||
'href="/r/linux">r/linux</a> '
|
|
||||||
"outside of this megathread, "
|
|
||||||
"please consider using <a "
|
|
||||||
'href="/r/linuxquestions">r/linuxquestions</a> '
|
|
||||||
"or <a "
|
|
||||||
'href="/r/linux4noobs">r/linux4noobs</a> '
|
|
||||||
"for the best solution to "
|
|
||||||
"your problem.</p>\n"
|
|
||||||
"\n"
|
|
||||||
"<p>Ask your hardware "
|
|
||||||
"requests here too or try "
|
|
||||||
"<a "
|
|
||||||
'href="/r/linuxhardware">r/linuxhardware</a>!</p>\n'
|
|
||||||
"</div><!-- SC_ON "
|
|
||||||
"-->",
|
|
||||||
"spoiler": False,
|
|
||||||
"stickied": True,
|
|
||||||
"subreddit": "linux",
|
|
||||||
"subreddit_id": "t5_2qh1a",
|
|
||||||
"subreddit_name_prefixed": "r/linux",
|
|
||||||
"title": "Weekly Questions and Hardware " "Thread - July 01, 2020",
|
|
||||||
"url": "https://www.reddit.com/r/linux/comments/hj34ck/weekly_questions_and_hardware_thread_july_01_2020/",
|
|
||||||
"visited": False,
|
|
||||||
},
|
|
||||||
"kind": "t3",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"archived": False,
|
|
||||||
"author": "AutoModerator",
|
|
||||||
"banned_at_utc": None,
|
|
||||||
"banned_by": None,
|
|
||||||
"category": None,
|
|
||||||
"created": 1593824903.0,
|
|
||||||
"created_utc": 1593796103.0,
|
|
||||||
"discussion_type": None,
|
|
||||||
"domain": "self.linux",
|
|
||||||
"edited": False,
|
|
||||||
"hidden": False,
|
|
||||||
"id": "hkmu0t",
|
|
||||||
"name": "t3_hkmu0t",
|
|
||||||
"permalink": "/r/linux/comments/hkmu0t/weekend_fluff_linux_in_the_wild_thread_july_03/",
|
|
||||||
"pinned": False,
|
|
||||||
"saved": False,
|
|
||||||
"selftext": "Welcome to the weekend! This "
|
|
||||||
"stickied thread is for you to "
|
|
||||||
"post pictures of your ubuntu "
|
|
||||||
"2006 install disk, slackware "
|
|
||||||
"floppies, on-topic memes or "
|
|
||||||
"more.\n"
|
|
||||||
"\n"
|
|
||||||
"When it's not the weekend, be "
|
|
||||||
"sure to check out "
|
|
||||||
"r/WildLinuxAppears or "
|
|
||||||
"r/linuxmemes!",
|
|
||||||
"selftext_html": "<!-- SC_OFF "
|
|
||||||
"--><div "
|
|
||||||
'class="md"><p>Welcome '
|
|
||||||
"to the weekend! This "
|
|
||||||
"stickied thread is for you "
|
|
||||||
"to post pictures of your "
|
|
||||||
"ubuntu 2006 install disk, "
|
|
||||||
"slackware floppies, "
|
|
||||||
"on-topic memes or "
|
|
||||||
"more.</p>\n"
|
|
||||||
"\n"
|
|
||||||
"<p>When it&#39;s "
|
|
||||||
"not the weekend, be sure to "
|
|
||||||
"check out <a "
|
|
||||||
'href="/r/WildLinuxAppears">r/WildLinuxAppears</a> '
|
|
||||||
"or <a "
|
|
||||||
'href="/r/linuxmemes">r/linuxmemes</a>!</p>\n'
|
|
||||||
"</div><!-- SC_ON "
|
|
||||||
"-->",
|
|
||||||
"spoiler": False,
|
|
||||||
"stickied": True,
|
|
||||||
"subreddit": "linux",
|
|
||||||
"subreddit_id": "t5_2qh1a",
|
|
||||||
"subreddit_name_prefixed": "r/linux",
|
|
||||||
"subreddit_subscribers": 542073,
|
|
||||||
"subreddit_type": "public",
|
|
||||||
"thumbnail": "",
|
|
||||||
"title": "Weekend Fluff / Linux in the Wild "
|
|
||||||
"Thread - July 03, 2020",
|
|
||||||
"url": "https://www.reddit.com/r/linux/comments/hkmu0t/weekend_fluff_linux_in_the_wild_thread_july_03/",
|
|
||||||
"visited": False,
|
|
||||||
},
|
|
||||||
"kind": "t3",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
"dist": 27,
|
|
||||||
"modhash": None,
|
|
||||||
},
|
|
||||||
"kind": "Listing",
|
|
||||||
}
|
|
||||||
|
|
@ -1,163 +0,0 @@
|
||||||
from unittest.mock import Mock, patch
|
|
||||||
from uuid import uuid4
|
|
||||||
|
|
||||||
from django.test import TestCase
|
|
||||||
from django.utils.lorem_ipsum import words
|
|
||||||
|
|
||||||
from newsreader.accounts.tests.factories import UserFactory
|
|
||||||
from newsreader.news.collection.exceptions import (
|
|
||||||
StreamDeniedException,
|
|
||||||
StreamException,
|
|
||||||
StreamNotFoundException,
|
|
||||||
StreamParseException,
|
|
||||||
StreamTimeOutException,
|
|
||||||
StreamTooManyException,
|
|
||||||
)
|
|
||||||
from newsreader.news.collection.reddit import RedditClient
|
|
||||||
from newsreader.news.collection.tests.factories import SubredditFactory
|
|
||||||
|
|
||||||
from .mocks import simple_mock
|
|
||||||
|
|
||||||
|
|
||||||
class RedditClientTestCase(TestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.maxDiff = None
|
|
||||||
|
|
||||||
self.patched_read = patch("newsreader.news.collection.reddit.RedditStream.read")
|
|
||||||
self.mocked_read = self.patched_read.start()
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
patch.stopall()
|
|
||||||
|
|
||||||
def test_client_retrieves_single_rules(self):
|
|
||||||
subreddit = SubredditFactory()
|
|
||||||
mock_stream = Mock(rule=subreddit)
|
|
||||||
|
|
||||||
self.mocked_read.return_value = (simple_mock, mock_stream)
|
|
||||||
|
|
        with RedditClient([[subreddit]]) as client:
            for data, stream in client:
                with self.subTest(data=data, stream=stream):
                    self.assertEquals(data, simple_mock)
                    self.assertEquals(stream, mock_stream)

        self.mocked_read.assert_called_once_with()

    def test_client_catches_stream_exception(self):
        subreddit = SubredditFactory()

        self.mocked_read.side_effect = StreamException(message="Stream exception")

        with RedditClient([[subreddit]]) as client:
            for data, stream in client:
                with self.subTest(data=data, stream=stream):
                    self.assertEquals(data, None)
                    self.assertEquals(stream, None)
                    self.assertEquals(stream.rule.error, "Stream exception")
                    self.assertEquals(stream.rule.succeeded, False)

        self.mocked_read.assert_called_once_with()

    def test_client_catches_stream_not_found_exception(self):
        subreddit = SubredditFactory.create()

        self.mocked_read.side_effect = StreamNotFoundException(
            message="Stream not found"
        )

        with RedditClient([[subreddit]]) as client:
            for data, stream in client:
                with self.subTest(data=data, stream=stream):
                    self.assertEquals(data, None)
                    self.assertEquals(stream, None)
                    self.assertEquals(stream.rule.error, "Stream not found")
                    self.assertEquals(stream.rule.succeeded, False)

        self.mocked_read.assert_called_once_with()

    @patch("newsreader.news.collection.reddit.RedditTokenTask")
    def test_client_catches_stream_denied_exception(self, mocked_task):
        user = UserFactory(
            reddit_access_token=str(uuid4()), reddit_refresh_token=str(uuid4())
        )
        subreddit = SubredditFactory(user=user)

        self.mocked_read.side_effect = StreamDeniedException(message="Token expired")

        with RedditClient([(subreddit,)]) as client:
            results = [(data, stream) for data, stream in client]

        self.mocked_read.assert_called_once_with()
        mocked_task.delay.assert_called_once_with(user.pk)

        self.assertEquals(len(results), 0)

        user.refresh_from_db()
        subreddit.refresh_from_db()

        self.assertEquals(user.reddit_access_token, None)
        self.assertEquals(subreddit.succeeded, False)
        self.assertEquals(subreddit.error, "Token expired")

    def test_client_catches_stream_timed_out_exception(self):
        subreddit = SubredditFactory()

        self.mocked_read.side_effect = StreamTimeOutException(
            message="Stream timed out"
        )

        with RedditClient([[subreddit]]) as client:
            for data, stream in client:
                with self.subTest(data=data, stream=stream):
                    self.assertEquals(data, None)
                    self.assertEquals(stream, None)
                    self.assertEquals(stream.rule.error, "Stream timed out")
                    self.assertEquals(stream.rule.succeeded, False)

        self.mocked_read.assert_called_once_with()

    def test_client_catches_stream_too_many_exception(self):
        subreddit = SubredditFactory()

        self.mocked_read.side_effect = StreamTooManyException

        with RedditClient([[subreddit]]) as client:
            for data, stream in client:
                with self.subTest(data=data, stream=stream):
                    self.assertEquals(data, None)
                    self.assertEquals(stream, None)
                    self.assertEquals(stream.rule.error, "Too many requests")
                    self.assertEquals(stream.rule.succeeded, False)

        self.mocked_read.assert_called_once_with()

    def test_client_catches_stream_parse_exception(self):
        subreddit = SubredditFactory()

        self.mocked_read.side_effect = StreamParseException(
            message="Stream could not be parsed"
        )

        with RedditClient([[subreddit]]) as client:
            for data, stream in client:
                with self.subTest(data=data, stream=stream):
                    self.assertEquals(data, None)
                    self.assertEquals(stream, None)
                    self.assertEquals(stream.rule.error, "Stream could not be parsed")
                    self.assertEquals(stream.rule.succeeded, False)

        self.mocked_read.assert_called_once_with()

    def test_client_catches_long_exception_text(self):
        subreddit = SubredditFactory()

        self.mocked_read.side_effect = StreamParseException(message=words(1000))

        with RedditClient([[subreddit]]) as client:
            for data, stream in client:
                self.assertEquals(data, None)
                self.assertEquals(stream, None)
                self.assertEquals(len(stream.rule.error), 1024)
                self.assertEquals(stream.rule.succeeded, False)

        self.mocked_read.assert_called_once_with()
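The last test above pins the stored error text at exactly 1024 characters. A minimal sketch of how such truncation could look, assuming a rule object with error and succeeded attributes; the constant and function names are illustrative, not taken from this project:

MAX_ERROR_LENGTH = 1024  # assumed cap, matching the 1024 characters the test expects


def record_rule_error(rule, exception):
    """Mark a rule as failed and store a truncated copy of the exception text."""
    rule.succeeded = False
    rule.error = str(exception)[:MAX_ERROR_LENGTH]
    return rule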
File diff suppressed because it is too large
@@ -1,201 +0,0 @@
from datetime import datetime, timezone
from unittest.mock import patch
from uuid import uuid4

from django.test import TestCase

from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.exceptions import (
    StreamDeniedException,
    StreamForbiddenException,
    StreamNotFoundException,
    StreamTimeOutException,
)
from newsreader.news.collection.reddit import RedditCollector
from newsreader.news.collection.tests.factories import SubredditFactory
from newsreader.news.collection.tests.reddit.collector.mocks import (
    empty_mock,
    simple_mock_1,
    simple_mock_2,
)
from newsreader.news.core.models import Post


class RedditCollectorTestCase(TestCase):
    def setUp(self):
        self.maxDiff = None

        self.patched_get = patch("newsreader.news.collection.reddit.fetch")
        self.mocked_fetch = self.patched_get.start()

        self.patched_parse = patch(
            "newsreader.news.collection.reddit.RedditStream.parse"
        )
        self.mocked_parse = self.patched_parse.start()

    def tearDown(self):
        patch.stopall()

    def test_simple_batch(self):
        self.mocked_parse.side_effect = (simple_mock_1, simple_mock_2)

        rules = (
            (subreddit,)
            for subreddit in SubredditFactory.create_batch(
                user__reddit_access_token=str(uuid4()),
                user__reddit_refresh_token=str(uuid4()),
                enabled=True,
                size=2,
            )
        )

        collector = RedditCollector()
        collector.collect(rules=rules)

        self.assertCountEqual(
            Post.objects.values_list("remote_identifier", flat=True),
            (
                "hm6byg",
                "hpkhgj",
                "hph00n",
                "hp9mlw",
                "hpjn8x",
                "gdfaip",
                "hmd2ez",
                "hpr28u",
                "hpps6f",
                "hp7uqe",
            ),
        )

        for subreddit in rules:
            with self.subTest(subreddit=subreddit):
                self.assertEqual(subreddit.succeeded, True)
                self.assertEqual(subreddit.last_run, datetime.now(tz=timezone.utc))
                self.assertEqual(subreddit.error, None)

        post = Post.objects.get(
            remote_identifier="hph00n", rule__type=RuleTypeChoices.subreddit
        )

        self.assertEqual(
            post.publication_date,
            datetime(2020, 7, 11, 22, 23, 24, tzinfo=timezone.utc),
        )

        self.assertEqual(post.author, "HannahB888")
        self.assertEqual(post.title, "Drake Interplanetary Smartkey thing that I made!")
        self.assertEqual(
            post.url,
            "https://www.reddit.com/r/starcitizen/comments/hph00n/drake_interplanetary_smartkey_thing_that_i_made/",
        )

        post = Post.objects.get(
            remote_identifier="hpr28u", rule__type=RuleTypeChoices.subreddit
        )

        self.assertEqual(
            post.publication_date,
            datetime(2020, 7, 12, 10, 29, 10, tzinfo=timezone.utc),
        )

        self.assertEqual(post.author, "Sebaron")
        self.assertEqual(
            post.title,
            "I am a medical student, and I recently programmed an open-source eye-tracker for brain research",
        )
        self.assertEqual(
            post.url,
            "https://www.reddit.com/r/Python/comments/hpr28u/i_am_a_medical_student_and_i_recently_programmed/",
        )

    def test_empty_batch(self):
        self.mocked_parse.side_effect = (empty_mock, empty_mock)

        rules = (
            (subreddit,)
            for subreddit in SubredditFactory.create_batch(
                user__reddit_access_token=str(uuid4()),
                user__reddit_refresh_token=str(uuid4()),
                enabled=True,
                size=2,
            )
        )

        collector = RedditCollector()
        collector.collect(rules=rules)

        self.assertEqual(Post.objects.count(), 0)

        for subreddit in rules:
            with self.subTest(subreddit=subreddit):
                self.assertEqual(subreddit.succeeded, True)
                self.assertEqual(subreddit.last_run, datetime.now(tz=timezone.utc))
                self.assertEqual(subreddit.error, None)

    def test_not_found(self):
        self.mocked_fetch.side_effect = StreamNotFoundException

        rule = SubredditFactory(
            user__reddit_access_token=str(uuid4()),
            user__reddit_refresh_token=str(uuid4()),
            enabled=True,
        )

        collector = RedditCollector()
        collector.collect(rules=((rule,),))

        self.assertEqual(Post.objects.count(), 0)
        self.assertEqual(rule.succeeded, False)
        self.assertEqual(rule.error, "Stream not found")

    @patch("newsreader.news.collection.reddit.RedditTokenTask")
    def test_denied(self, mocked_task):
        self.mocked_fetch.side_effect = StreamDeniedException

        rule = SubredditFactory(
            user__reddit_access_token=str(uuid4()),
            user__reddit_refresh_token=str(uuid4()),
            enabled=True,
        )

        collector = RedditCollector()
        collector.collect(rules=((rule,),))

        self.assertEqual(Post.objects.count(), 0)
        self.assertEqual(rule.succeeded, False)
        self.assertEqual(rule.error, "Stream does not have sufficient permissions")

        mocked_task.delay.assert_called_once_with(rule.user.pk)

    def test_forbidden(self):
        self.mocked_fetch.side_effect = StreamForbiddenException

        rule = SubredditFactory(
            user__reddit_access_token=str(uuid4()),
            user__reddit_refresh_token=str(uuid4()),
            enabled=True,
        )

        collector = RedditCollector()
        collector.collect(rules=((rule,),))

        self.assertEqual(Post.objects.count(), 0)
        self.assertEqual(rule.succeeded, False)
        self.assertEqual(rule.error, "Stream forbidden")

    def test_timed_out(self):
        self.mocked_fetch.side_effect = StreamTimeOutException

        rule = SubredditFactory(
            user__reddit_access_token=str(uuid4()),
            user__reddit_refresh_token=str(uuid4()),
            enabled=True,
        )

        collector = RedditCollector()
        collector.collect(rules=((rule,),))

        self.assertEqual(Post.objects.count(), 0)
        self.assertEqual(rule.succeeded, False)
        self.assertEqual(rule.error, "Stream timed out")
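The collector tests above fix one error message per stream exception and expect a RedditTokenTask to be queued when access is denied. A hedged sketch of failure handling that would satisfy those assertions; only the messages and the delay(user.pk) call come from the tests, the function itself is assumed:

def handle_stream_failure(rule, exception, token_task=None):
    """Record a stream failure on a rule, as the collector tests assert."""
    # Map exception class names to the messages the tests check against.
    messages = {
        "StreamNotFoundException": "Stream not found",
        "StreamForbiddenException": "Stream forbidden",
        "StreamTimeOutException": "Stream timed out",
        "StreamDeniedException": "Stream does not have sufficient permissions",
    }
    name = type(exception).__name__
    rule.succeeded = False
    rule.error = messages.get(name, str(exception))
    if name == "StreamDeniedException" and token_task is not None:
        # The denied test expects a token refresh task queued with the owner's pk.
        token_task.delay(rule.user.pk)
    return rule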
File diff suppressed because it is too large
@@ -1,144 +0,0 @@
from json.decoder import JSONDecodeError
from unittest.mock import patch
from uuid import uuid4

from django.test import TestCase

from newsreader.accounts.tests.factories import UserFactory
from newsreader.news.collection.exceptions import (
    StreamDeniedException,
    StreamException,
    StreamForbiddenException,
    StreamNotFoundException,
    StreamParseException,
    StreamTimeOutException,
)
from newsreader.news.collection.reddit import RedditStream
from newsreader.news.collection.tests.factories import SubredditFactory
from newsreader.news.collection.tests.reddit.stream.mocks import simple_mock


class RedditStreamTestCase(TestCase):
    def setUp(self):
        self.maxDiff = None

        self.patched_fetch = patch("newsreader.news.collection.reddit.fetch")
        self.mocked_fetch = self.patched_fetch.start()

    def tearDown(self):
        patch.stopall()

    def test_simple_stream(self):
        self.mocked_fetch.return_value.json.return_value = simple_mock

        access_token = str(uuid4())
        user = UserFactory(reddit_access_token=access_token)

        subreddit = SubredditFactory(user=user)
        stream = RedditStream(subreddit)

        data, stream = stream.read()

        self.assertEquals(data, simple_mock)
        self.assertEquals(stream, stream)
        self.mocked_fetch.assert_called_once_with(
            subreddit.url, headers={"Authorization": f"bearer {access_token}"}
        )

    def test_stream_raises_exception(self):
        self.mocked_fetch.side_effect = StreamException

        access_token = str(uuid4())
        user = UserFactory(reddit_access_token=access_token)

        subreddit = SubredditFactory(user=user)
        stream = RedditStream(subreddit)

        with self.assertRaises(StreamException):
            stream.read()

        self.mocked_fetch.assert_called_once_with(
            subreddit.url, headers={"Authorization": f"bearer {access_token}"}
        )

    def test_stream_raises_denied_exception(self):
        self.mocked_fetch.side_effect = StreamDeniedException

        access_token = str(uuid4())
        user = UserFactory(reddit_access_token=access_token)

        subreddit = SubredditFactory(user=user)
        stream = RedditStream(subreddit)

        with self.assertRaises(StreamDeniedException):
            stream.read()

        self.mocked_fetch.assert_called_once_with(
            subreddit.url, headers={"Authorization": f"bearer {access_token}"}
        )

    def test_stream_raises_not_found_exception(self):
        self.mocked_fetch.side_effect = StreamNotFoundException

        access_token = str(uuid4())
        user = UserFactory(reddit_access_token=access_token)

        subreddit = SubredditFactory(user=user)
        stream = RedditStream(subreddit)

        with self.assertRaises(StreamNotFoundException):
            stream.read()

        self.mocked_fetch.assert_called_once_with(
            subreddit.url, headers={"Authorization": f"bearer {access_token}"}
        )

    def test_stream_raises_time_out_exception(self):
        self.mocked_fetch.side_effect = StreamTimeOutException

        access_token = str(uuid4())
        user = UserFactory(reddit_access_token=access_token)

        subreddit = SubredditFactory(user=user)
        stream = RedditStream(subreddit)

        with self.assertRaises(StreamTimeOutException):
            stream.read()

        self.mocked_fetch.assert_called_once_with(
            subreddit.url, headers={"Authorization": f"bearer {access_token}"}
        )

    def test_stream_raises_forbidden_exception(self):
        self.mocked_fetch.side_effect = StreamForbiddenException

        access_token = str(uuid4())
        user = UserFactory(reddit_access_token=access_token)

        subreddit = SubredditFactory(user=user)
        stream = RedditStream(subreddit)

        with self.assertRaises(StreamForbiddenException):
            stream.read()

        self.mocked_fetch.assert_called_once_with(
            subreddit.url, headers={"Authorization": f"bearer {access_token}"}
        )

    def test_stream_raises_parse_exception(self):
        self.mocked_fetch.return_value.json.side_effect = JSONDecodeError(
            "No json found", "{}", 5
        )

        access_token = str(uuid4())
        user = UserFactory(reddit_access_token=access_token)

        subreddit = SubredditFactory(user=user)
        stream = RedditStream(subreddit)

        with self.assertRaises(StreamParseException):
            stream.read()

        self.mocked_fetch.assert_called_once_with(
            subreddit.url, headers={"Authorization": f"bearer {access_token}"}
        )
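These stream tests pin down two behaviours: the fetch helper is called with the subreddit URL plus a bearer Authorization header, and a JSON decoding failure surfaces as StreamParseException. A minimal sketch under those assumptions, using requests as a stand-in for the project's own fetch helper:

import requests


class StreamParseException(Exception):
    """Stand-in for the project's exception type."""


def read_subreddit(url, access_token):
    """Fetch a subreddit listing with an OAuth bearer token, as the tests mock it."""
    response = requests.get(url, headers={"Authorization": f"bearer {access_token}"})
    try:
        return response.json()
    except ValueError as exc:  # JSON decode errors are ValueError subclasses
        raise StreamParseException("Stream could not be parsed") from exc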
@@ -1,142 +0,0 @@
from datetime import timedelta

from django.test import TestCase
from django.utils import timezone

from freezegun import freeze_time

from newsreader.accounts.tests.factories import UserFactory
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.reddit import RedditScheduler
from newsreader.news.collection.tests.factories import CollectionRuleFactory


@freeze_time("2019-10-30 12:30:00")
class RedditSchedulerTestCase(TestCase):
    def test_simple(self):
        user_1 = UserFactory(
            reddit_access_token="1231414", reddit_refresh_token="5235262"
        )
        user_2 = UserFactory(
            reddit_access_token="3414777", reddit_refresh_token="3423425"
        )

        user_1_rules = [
            CollectionRuleFactory(
                user=user_1,
                type=RuleTypeChoices.subreddit,
                last_run=timezone.now() - timedelta(days=4),
                enabled=True,
            ),
            CollectionRuleFactory(
                user=user_1,
                type=RuleTypeChoices.subreddit,
                last_run=timezone.now() - timedelta(days=3),
                enabled=True,
            ),
            CollectionRuleFactory(
                user=user_1,
                type=RuleTypeChoices.subreddit,
                last_run=timezone.now() - timedelta(days=2),
                enabled=True,
            ),
        ]

        user_2_rules = [
            CollectionRuleFactory(
                user=user_2,
                type=RuleTypeChoices.subreddit,
                last_run=timezone.now() - timedelta(days=4),
                enabled=True,
            ),
            CollectionRuleFactory(
                user=user_2,
                type=RuleTypeChoices.subreddit,
                last_run=timezone.now() - timedelta(days=3),
                enabled=True,
            ),
            CollectionRuleFactory(
                user=user_2,
                type=RuleTypeChoices.subreddit,
                last_run=timezone.now() - timedelta(days=2),
                enabled=True,
            ),
        ]

        scheduler = RedditScheduler()
        scheduled_subreddits = scheduler.get_scheduled_rules()

        user_1_batch = [subreddit.pk for subreddit in scheduled_subreddits[0]]

        self.assertIn(user_1_rules[0].pk, user_1_batch)
        self.assertIn(user_1_rules[1].pk, user_1_batch)
        self.assertIn(user_1_rules[2].pk, user_1_batch)

        user_2_batch = [subreddit.pk for subreddit in scheduled_subreddits[1]]

        self.assertIn(user_2_rules[0].pk, user_2_batch)
        self.assertIn(user_2_rules[1].pk, user_2_batch)
        self.assertIn(user_2_rules[2].pk, user_2_batch)

    def test_max_amount(self):
        users = UserFactory.create_batch(
            reddit_access_token="1231414", reddit_refresh_token="5235262", size=5
        )

        nested_rules = [
            CollectionRuleFactory.create_batch(
                name=f"rule-{index}",
                type=RuleTypeChoices.subreddit,
                last_run=timezone.now() - timedelta(seconds=index),
                enabled=True,
                user=user,
                size=15,
            )
            for index, user in enumerate(users)
        ]

        rules = [rule for rule_list in nested_rules for rule in rule_list]

        scheduler = RedditScheduler()
        scheduled_subreddits = [
            subreddit.pk
            for batch in scheduler.get_scheduled_rules()
            for subreddit in batch
        ]

        for rule in rules[16:76]:
            with self.subTest(rule=rule):
                self.assertIn(rule.pk, scheduled_subreddits)

        for rule in rules[0:15]:
            with self.subTest(rule=rule):
                self.assertNotIn(rule.pk, scheduled_subreddits)

    def test_max_user_amount(self):
        user = UserFactory(
            reddit_access_token="1231414", reddit_refresh_token="5235262"
        )

        rules = [
            CollectionRuleFactory(
                name=f"rule-{index}",
                type=RuleTypeChoices.subreddit,
                last_run=timezone.now() - timedelta(seconds=index),
                enabled=True,
                user=user,
            )
            for index in range(1, 17)
        ]

        scheduler = RedditScheduler()
        scheduled_subreddits = [
            subreddit.pk
            for batch in scheduler.get_scheduled_rules()
            for subreddit in batch
        ]

        for rule in rules[1:16]:
            with self.subTest(rule=rule):
                self.assertIn(rule.pk, scheduled_subreddits)

        self.assertNotIn(rules[0].pk, scheduled_subreddits)
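Taken together, the scheduler tests imply per-user batches built stalest-first with at most 15 rules per user per run. A hedged sketch of that batching over plain rule objects; the constant and function names are assumptions, and the real RedditScheduler presumably works on querysets instead:

from collections import defaultdict
from operator import attrgetter

MAX_RULES_PER_USER = 15  # implied by test_max_user_amount


def batch_rules_per_user(rules):
    """Group rules by owner, order each group stalest-first, cap it per user."""
    per_user = defaultdict(list)
    for rule in rules:
        per_user[rule.user].append(rule)

    batches = []
    for user_rules in per_user.values():
        user_rules.sort(key=attrgetter("last_run"))  # oldest last_run first
        batches.append(user_rules[:MAX_RULES_PER_USER])
    return batches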
@@ -88,17 +88,3 @@ class FeedUpdateViewTestCase(CollectionRuleViewTestCase, TestCase):
        self.rule.refresh_from_db()

        self.assertEqual(self.rule.category, None)

    def test_rules_only(self):
        rule = FeedFactory(
            name="Python",
            url="https://reddit.com/r/python",
            user=self.user,
            category=self.category,
            type=RuleTypeChoices.subreddit,
        )
        url = reverse("news:collection:feed-update", kwargs={"pk": rule.pk})

        response = self.client.get(url)

        self.assertEqual(response.status_code, 404)
@@ -1,133 +0,0 @@
from django.test import TestCase
from django.urls import reverse
from django.utils.translation import gettext as _

from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.models import CollectionRule
from newsreader.news.collection.reddit import REDDIT_API_URL, REDDIT_URL
from newsreader.news.collection.tests.factories import SubredditFactory
from newsreader.news.collection.tests.views.base import CollectionRuleViewTestCase
from newsreader.news.core.tests.factories import CategoryFactory


class SubRedditCreateViewTestCase(CollectionRuleViewTestCase, TestCase):
    def setUp(self):
        super().setUp()

        self.form_data = {
            "name": "new rule",
            "url": f"{REDDIT_API_URL}/r/aww",
            "category": str(self.category.pk),
            "reddit_allow_nfsw": False,
            "reddit_allow_spoiler": False,
            "reddit_allow_viewed": True,
            "reddit_upvotes_min": 0,
            "reddit_comments_min": 0,
        }

        self.url = reverse("news:collection:subreddit-create")

    def test_creation(self):
        response = self.client.post(self.url, self.form_data)

        self.assertEqual(response.status_code, 302)

        rule = CollectionRule.objects.get(name="new rule")

        self.assertEqual(rule.type, RuleTypeChoices.subreddit)
        self.assertEqual(rule.url, f"{REDDIT_API_URL}/r/aww")
        self.assertEqual(rule.favicon, None)
        self.assertEqual(rule.category.pk, self.category.pk)
        self.assertEqual(rule.user.pk, self.user.pk)

    def test_regular_reddit_url(self):
        self.form_data.update(url=f"{REDDIT_URL}/r/aww")

        response = self.client.post(self.url, self.form_data)

        self.assertContains(response, _("This does not look like an Reddit API URL"))


class SubRedditUpdateViewTestCase(CollectionRuleViewTestCase, TestCase):
    def setUp(self):
        super().setUp()

        self.rule = SubredditFactory(
            name="Python",
            url=f"{REDDIT_API_URL}/r/python.json",
            user=self.user,
            category=self.category,
            type=RuleTypeChoices.subreddit,
        )
        self.url = reverse(
            "news:collection:subreddit-update", kwargs={"pk": self.rule.pk}
        )

        self.form_data = {
            "name": self.rule.name,
            "url": self.rule.url,
            "category": str(self.category.pk),
            "reddit_allow_nfsw": False,
            "reddit_allow_spoiler": False,
            "reddit_allow_viewed": True,
            "reddit_upvotes_min": 0,
            "reddit_comments_min": 0,
        }

    def test_name_change(self):
        self.form_data.update(name="Python 2")

        response = self.client.post(self.url, self.form_data)
        self.assertEqual(response.status_code, 302)

        self.rule.refresh_from_db()

        self.assertEqual(self.rule.name, "Python 2")

    def test_category_change(self):
        new_category = CategoryFactory(user=self.user)

        self.form_data.update(category=new_category.pk)

        response = self.client.post(self.url, self.form_data)
        self.assertEqual(response.status_code, 302)

        self.rule.refresh_from_db()

        self.assertEqual(self.rule.category.pk, new_category.pk)

    def test_subreddit_rules_only(self):
        rule = SubredditFactory(
            name="Fake subreddit",
            url="https://leddit.com/r/python",
            user=self.user,
            category=self.category,
            type=RuleTypeChoices.feed,
        )
        url = reverse("news:collection:subreddit-update", kwargs={"pk": rule.pk})

        response = self.client.get(url)

        self.assertEqual(response.status_code, 404)

    def test_url_change(self):
        self.form_data.update(name="aww", url=f"{REDDIT_API_URL}/r/aww")

        response = self.client.post(self.url, self.form_data)

        self.assertEqual(response.status_code, 302)

        rule = CollectionRule.objects.get(name="aww")

        self.assertEqual(rule.type, RuleTypeChoices.subreddit)
        self.assertEqual(rule.url, f"{REDDIT_API_URL}/r/aww")
        self.assertEqual(rule.favicon, None)
        self.assertEqual(rule.category.pk, self.category.pk)
        self.assertEqual(rule.user.pk, self.user.pk)

    def test_regular_reddit_url(self):
        self.form_data.update(url=f"{REDDIT_URL}/r/aww")

        response = self.client.post(self.url, self.form_data)

        self.assertContains(response, _("This does not look like an Reddit API URL"))
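Both view test cases expect plain reddit.com URLs to be rejected with the message "This does not look like an Reddit API URL". A minimal sketch of such a check as a Django form clean method; the REDDIT_API_URL value and the form's other fields are assumptions, not the project's SubRedditForm:

from django import forms
from django.utils.translation import gettext_lazy as _

REDDIT_API_URL = "https://oauth.reddit.com"  # assumed value of the project's constant


class SubRedditFormSketch(forms.Form):
    """Illustrative stand-in for the URL validation the tests exercise."""

    url = forms.URLField()

    def clean_url(self):
        url = self.cleaned_data["url"]
        if not url.startswith(REDDIT_API_URL):
            # Message copied verbatim from the tests above.
            raise forms.ValidationError(_("This does not look like an Reddit API URL"))
        return url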
@@ -14,8 +14,6 @@ from newsreader.news.collection.views import (
    FeedCreateView,
    FeedUpdateView,
    OPMLImportView,
    SubRedditCreateView,
    SubRedditUpdateView,
)
@@ -49,15 +47,4 @@ urlpatterns = [
        name="rules-disable",
    ),
    path("rules/import/", login_required(OPMLImportView.as_view()), name="import"),
    # Reddit
    path(
        "subreddits/create/",
        login_required(SubRedditCreateView.as_view()),
        name="subreddit-create",
    ),
    path(
        "subreddits/<int:pk>/",
        login_required(SubRedditUpdateView.as_view()),
        name="subreddit-update",
    ),
]
@@ -3,10 +3,7 @@ from newsreader.news.collection.views.feed import (
    FeedUpdateView,
    OPMLImportView,
)
from newsreader.news.collection.views.reddit import (
    SubRedditCreateView,
    SubRedditUpdateView,
)
from newsreader.news.collection.views.rules import (
    CollectionRuleBulkDeleteView,
    CollectionRuleBulkDisableView,
@@ -19,8 +16,6 @@ __all__ = [
    "FeedCreateView",
    "FeedUpdateView",
    "OPMLImportView",
    "SubRedditCreateView",
    "SubRedditUpdateView",
    "CollectionRuleBulkDeleteView",
    "CollectionRuleBulkDisableView",
    "CollectionRuleBulkEnableView",
@@ -1,27 +0,0 @@
from django.views.generic.edit import CreateView, UpdateView

from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.forms import SubRedditForm
from newsreader.news.collection.views.base import (
    CollectionRuleDetailMixin,
    CollectionRuleViewMixin,
)


class SubRedditCreateView(
    CollectionRuleViewMixin, CollectionRuleDetailMixin, CreateView
):
    form_class = SubRedditForm
    template_name = "news/collection/views/subreddit-create.html"


class SubRedditUpdateView(
    CollectionRuleViewMixin, CollectionRuleDetailMixin, UpdateView
):
    form_class = SubRedditForm
    template_name = "news/collection/views/subreddit-update.html"
    context_object_name = "subreddit"

    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(type=RuleTypeChoices.subreddit)
@@ -4,7 +4,6 @@ import factory
import factory.fuzzy

from newsreader.accounts.tests.factories import UserFactory
from newsreader.news.collection.reddit import REDDIT_API_URL
from newsreader.news.core.models import Category, Post
@@ -36,10 +35,3 @@ class PostFactory(factory.django.DjangoModelFactory):

class FeedPostFactory(PostFactory):
    rule = factory.SubFactory("newsreader.news.collection.tests.factories.FeedFactory")


class RedditPostFactory(PostFactory):
    url = factory.fuzzy.FuzzyText(length=10, prefix=f"{REDDIT_API_URL}/")
    rule = factory.SubFactory(
        "newsreader.news.collection.tests.factories.SubredditFactory"
    )
@@ -20,9 +20,6 @@ class NewsView(NavListMixin, TemplateView):
            **context,
            "homepageSettings": {
                "feedUrl": reverse_lazy("news:collection:feed-update", args=(0,)),
                "subredditUrl": reverse_lazy(
                    "news:collection:subreddit-update", args=(0,)
                ),
                "categoriesUrl": reverse_lazy("news:core:category-update", args=(0,)),
                "timezone": settings.TIME_ZONE,
                "autoMarking": self.request.user.auto_mark_read,
@@ -13,12 +13,14 @@
    cursor: pointer;
  }

  &--success, &--confirm {
  &--success,
  &--confirm {
    background-color: var(--confirm-color);
    color: $white !important;
  }

  &--error, &--cancel {
  &--error,
  &--cancel {
    color: $white !important;
    background-color: var(--danger-color);
  }

@@ -28,15 +30,6 @@
    background-color: var(--info-color);
  }

  &--reddit {
    color: $white !important;
    background-color: $reddit-orange;

    &:hover {
      background-color: lighten($reddit-orange, 5%);
    }
  }

  &--disabled {
    color: var(--font-color) !important;
    background-color: var(--background-color-secondary) !important;
@@ -59,6 +59,3 @@ $dark-info-color: $blue;
$dark-info-font-color: $white;

$dark-sidebar-background-color: $dark-background-color-secondary;

// Third party
$reddit-orange: rgba(255, 69, 0, 1);