Compare commits


35 commits
0.5.1...main

Author SHA1 Message Date
e40d69d5ff Use correct settings module for development
All checks were successful
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline was successful
ci/woodpecker/push/tests Pipeline was successful
2025-05-11 09:44:55 +02:00
83707701e9 Fix template formatting issues
All checks were successful
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline was successful
ci/woodpecker/push/tests Pipeline was successful
2025-05-05 16:49:34 +02:00
116e2c1577 Fix cache permissions
All checks were successful
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline was successful
ci/woodpecker/push/tests Pipeline was successful
see https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/reference.md#run---mounttypecache
2025-05-05 16:22:07 +02:00
cf96371b90 Fix formatting errors warnings 2025-05-05 15:42:12 +02:00
eadd7a5612 Add missing command invocation
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline was successful
2025-05-05 15:34:37 +02:00
62053a1048 Use uv image build with same python version
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline failed
2025-05-05 15:32:51 +02:00
b4340176da Use correct project name
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline was successful
2025-05-05 15:16:48 +02:00
433ff9413d Specify javascript build target 2025-05-05 15:14:54 +02:00
91949622b7 Update woodpecker configuration
Some checks failed
ci/woodpecker/push/build Pipeline failed
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline was successful
2025-05-05 15:11:55 +02:00
10affeb32f Docker compose refactor
Added shell interpolation for environment variables
2025-05-05 15:02:03 +02:00
e96c6f3528 Use psycopg-binary package
To prevent building the package from source
2025-05-05 14:40:40 +02:00
a534a3b691 Move jest configuration
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline failed
2025-05-04 19:52:24 +02:00
ebbbe99eaf Update package.json 2025-05-04 19:44:55 +02:00
c7f90e233e Move prettier configuration 2025-05-04 19:44:00 +02:00
9ba6824dd3 Remove unused isort configuration 2025-05-04 19:38:45 +02:00
4c5d3aec28 Move coverage configuration to pyproject.toml 2025-05-04 19:38:26 +02:00
dd9aaf467e Add editorconfig configuration 2025-05-04 19:34:25 +02:00
1417c52007 Apply prettier formatting
All checks were successful
ci/woodpecker/push/lint Pipeline was successful
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/tests Pipeline was successful
2025-03-28 21:55:35 +01:00
bfd081337b Run formatting / fix lint errors
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline was successful
2025-03-28 21:41:47 +01:00
b8559f0499 Remove reddit code
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline was successful
2025-03-27 22:02:12 +01:00
b465d0bb8d Remove leftover function binding usages
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline failed
2025-03-27 21:44:21 +01:00
1a54fdbcd1 Remove function binding usage
Some checks failed
ci/woodpecker/push/build Pipeline failed
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline failed
2025-03-24 09:17:30 +01:00
34afcc02b6 Remove requests oathlib
All checks were successful
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline was successful
ci/woodpecker/push/tests Pipeline was successful
2025-03-23 21:16:36 +01:00
1574661c57 Fix ruff errors
All checks were successful
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline was successful
ci/woodpecker/push/tests Pipeline was successful
2025-03-23 21:05:01 +01:00
3160becb72 Remove django-registration-redux
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline was successful
2025-03-23 21:01:23 +01:00
105371abaf Use long command options
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline was successful
2025-03-23 16:25:03 +01:00
ed37be0c60 Add celery healthcheck & update existing healthcheck 2025-03-23 16:24:33 +01:00
161234defd Bump rabbitmq version 2025-03-23 16:23:45 +01:00
f3ba0f1d09 Update ruff & uv usage
Some checks failed
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline failed
ci/woodpecker/push/tests Pipeline was successful
2025-03-23 16:19:15 +01:00
aff565862c Add woodpecker CI configuration
All checks were successful
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/lint Pipeline was successful
ci/woodpecker/push/tests Pipeline was successful
2024-12-26 20:20:21 +01:00
bf43603d65 Update versions 2024-10-13 12:52:06 +02:00
91a7f6325c Update changelog 2024-10-13 12:49:55 +02:00
e33497569a Apply query optimizations for posts 2024-10-13 10:16:57 +02:00
2d5801f226 Update changelog 2024-10-07 21:43:52 +02:00
89d4ebdc49 Add missing VERSION environment variable 2024-10-07 21:42:20 +02:00
127 changed files with 1077 additions and 14723 deletions


@@ -1,16 +0,0 @@
[run]
source = ./src/newsreader/
omit =
**/tests/**
**/migrations/**
**/conf/**
**/apps.py
**/admin.py
**/tests.py
**/urls.py
**/wsgi.py
**/celery.py
**/__init__.py
[html]
directory = coverage

.editorconfig Normal file

@@ -0,0 +1,25 @@
# https://editorconfig.org
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
trim_trailing_whitespace = true
[*.py]
indent_style = space
indent_size = 4
[*.{yaml,yml,toml,md}]
indent_style = space
indent_size = 2
[Dockerfile*]
indent_style = space
indent_size = 4
[*.json]
indent_style = space
indent_size = 2


@@ -1,29 +0,0 @@
stages:
- build
- test
- lint
- release
variables:
UV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/uv"
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
DJANGO_SETTINGS_MODULE: "newsreader.conf.gitlab"
POSTGRES_HOST: "$POSTGRES_HOST"
POSTGRES_DB: "$POSTGRES_NAME"
POSTGRES_NAME: "$POSTGRES_NAME"
POSTGRES_USER: "$POSTGRES_USER"
POSTGRES_PASSWORD: "$POSTGRES_PASSWORD"
cache:
key: "$CI_COMMIT_REF_SLUG"
paths:
- .cache/pip
- .cache/uv
- node_modules/
- .venv/
include:
- local: '/gitlab-ci/build.yml'
- local: '/gitlab-ci/test.yml'
- local: '/gitlab-ci/lint.yml'
- local: '/gitlab-ci/release.yml'


@@ -1,12 +0,0 @@
[settings]
include_trailing_comma = true
line_length = 88
multi_line_output = 3
skip = env/, venv/
default_section = THIRDPARTY
known_first_party = newsreader
known_django = django
sections = FUTURE,STDLIB,DJANGO,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
lines_between_types=1
lines_after_imports=2
lines_between_types=1


@@ -1,10 +0,0 @@
{
"semi": true,
"trailingComma": "es5",
"singleQuote": true,
"printWidth": 90,
"tabWidth": 2,
"useTabs": false,
"bracketSpacing": true,
"arrowParens": "avoid"
}

.woodpecker/build.yaml Normal file

@@ -0,0 +1,10 @@
when:
- event: push
- event: pull_request
- event: manual
steps:
- image: node:lts-alpine
commands:
- npm install
- npm run build:prod

.woodpecker/lint.yaml Normal file

@@ -0,0 +1,18 @@
when:
- event: push
- event: pull_request
- event: manual
steps:
- name: python linting
image: ghcr.io/astral-sh/uv:python3.11-alpine
commands:
- uv sync --group ci
- uv run --no-sync -- ruff check src/
- uv run --no-sync -- ruff format --check src/
- name: javascript linting
image: node:lts-alpine
commands:
- npm ci
- npm run lint

.woodpecker/tests.yaml Normal file

@@ -0,0 +1,37 @@
when:
- event: push
- event: pull_request
- event: manual
services:
- name: postgres
image: postgres:15
environment:
POSTGRES_NAME: &db-name newsreader
POSTGRES_USER: &db-user newsreader
POSTGRES_PASSWORD: &db-password sekrit
- name: memcached
image: memcached:1.5.22
steps:
- name: python tests
image: ghcr.io/astral-sh/uv:python3.11-alpine
environment:
DJANGO_SETTINGS_MODULE: "newsreader.conf.ci"
DJANGO_SECRET_KEY: sekrit
POSTGRES_HOST: postgres
POSTGRES_PORT: 5432
POSTGRES_DB: *db-name
POSTGRES_USER: *db-user
POSTGRES_PASSWORD: *db-password
commands:
- pip install uv
- uv sync --group ci
- uv run --no-sync -- coverage run ./src/manage.py test newsreader
- uv run --no-sync -- coverage report --show-missing
- name: javascript tests
image: node:lts-alpine
commands:
- npm ci
- npm test


@@ -1,5 +1,13 @@
# Changelog
## 0.5.3
- Apply query optimizations for retrieving posts
## 0.5.2
- Add missing `VERSION` environment variable
## 0.5.1
- Use line-through styling for read posts

Dockerfile Normal file

@@ -0,0 +1,84 @@
# stage 1
FROM python:3.11-alpine AS backend
ARG USER_ID=1000
ARG GROUP_ID=1000
ARG UV_LINK_MODE=copy
RUN apk update \
&& apk add --no-cache \
vim \
curl \
gettext
RUN addgroup -g $USER_ID newsreader && adduser -Du $GROUP_ID -G newsreader newsreader
RUN mkdir --parents /app/src /app/logs /app/media /app/bin /app/static \
&& chown -R newsreader:newsreader /app
WORKDIR /app
USER newsreader
COPY --chown=newsreader:newsreader uv.lock pyproject.toml /app/
COPY --from=ghcr.io/astral-sh/uv:python3.11-alpine /usr/local/bin/uv /bin/uv
RUN --mount=type=cache,uid=$USER_ID,gid=$GROUP_ID,target=/home/newsreader/.cache/uv \
uv sync --frozen --no-default-groups --no-install-project
COPY --chown=newsreader:newsreader ./bin/docker-entrypoint.sh /app/bin/docker-entrypoint.sh
VOLUME ["/app/logs", "/app/media", "/app/static"]
# stage 2
FROM node:lts-alpine AS frontend-build
ARG BUILD_ARG=prod
WORKDIR /app
RUN chown node:node /app
USER node
COPY --chown=node:node ./package*.json ./webpack.*.js ./babel.config.js /app/
RUN --mount=type=cache,uid=1000,gid=1000,target=/home/node/.npm \
npm ci
COPY --chown=node:node ./src /app/src
RUN npm run build:$BUILD_ARG
# stage 3
FROM backend AS production
COPY --from=frontend-build --chown=newsreader:newsreader \
/app/src/newsreader/static /app/src/newsreader/static
RUN --mount=type=cache,uid=$USER_ID,gid=$GROUP_ID,target=/home/newsreader/.cache/uv \
uv sync --frozen --only-group production --extra sentry
COPY --chown=newsreader:newsreader ./src /app/src
ENV DJANGO_SETTINGS_MODULE=newsreader.conf.production
# Note that the static volume will have to be recreated to be pre-populated
# correctly with the latest static files. See
# https://docs.docker.com/storage/volumes/#populate-a-volume-using-a-container
RUN uv run --no-sync -- src/manage.py collectstatic --noinput
# (optional) stage 4
FROM backend AS development
RUN --mount=type=cache,uid=$USER_ID,gid=$GROUP_ID,target=/home/newsreader/.cache/uv \
uv sync --frozen --group development
ENV DJANGO_SETTINGS_MODULE=newsreader.conf.docker


@@ -1,23 +1,21 @@
module.exports = api => {
const isTest = api.env('test');
const preset = [
"@babel/preset-env" , { targets: 'defaults' }
const preset = [
"@babel/preset-env", { targets: 'defaults' }
];
const testPreset = [
"@babel/preset-env", { targets: { node: process.versions.node } }
];
const testPreset = [
"@babel/preset-env", { targets: { node: process.versions.node } }
];
const plugins = [
"@babel/plugin-syntax-dynamic-import",
"@babel/plugin-transform-react-jsx",
"@babel/plugin-syntax-function-bind",
"@babel/plugin-proposal-function-bind",
"@babel/plugin-proposal-class-properties"
]
const plugins = [
"@babel/plugin-syntax-dynamic-import",
"@babel/plugin-transform-react-jsx",
"@babel/plugin-proposal-class-properties"
]
return {
"presets": [isTest ? testPreset: preset],
"plugins": plugins
}
return {
"presets": [isTest ? testPreset : preset],
"plugins": plugins
}
}


@@ -1,5 +1,5 @@
#!/bin/bash
/app/.venv/bin/python /app/src/manage.py migrate
uv run --no-sync -- /app/src/manage.py migrate
exec "$@"


@@ -1,18 +1,13 @@
volumes:
static-files:
node-modules:
services:
celery:
build:
target: development
volumes:
- ./src/:/app/src
django:
build:
build: &app-development-build
target: development
command: /app/.venv/bin/python /app/src/manage.py runserver 0.0.0.0:8000
command: uv run --no-sync -- /app/src/manage.py runserver 0.0.0.0:8000
environment: &django-env
DJANGO_SETTINGS_MODULE: ${DJANGO_SETTINGS_MODULE:-newsreader.conf.docker}
ports:
- "${DJANGO_PORT:-8000}:8000"
volumes:
@@ -21,12 +16,21 @@ services:
stdin_open: true
tty: true
celery:
build:
<<: *app-development-build
environment:
<<: *django-env
volumes:
- ./src/:/app/src
webpack:
build:
target: frontend-build
context: .
dockerfile: ./docker/webpack
args:
BUILD_ARG: "dev"
command: npm run build:watch
volumes:
- ./src/:/app/src
- static-files:/app/src/newsreader/static
- node-modules:/app/node_modules


@@ -9,7 +9,6 @@ services:
django:
condition: service_healthy
ports:
# Note that --env-file should be used to set these correctly
- "${NGINX_HTTP_PORT:-80}:80"
volumes:
- ./config/nginx/conf.d:/etc/nginx/conf.d


@@ -4,37 +4,43 @@ volumes:
postgres-data:
static-files:
x-db-env: &db-env
POSTGRES_HOST:
POSTGRES_PORT:
POSTGRES_DB:
POSTGRES_USER:
POSTGRES_PASSWORD:
x-db-connection-env: &db-connection-env
POSTGRES_HOST: ${POSTGRES_HOST:-db}
POSTGRES_PORT: ${POSTGRES_PORT:-5432}
POSTGRES_DB: &pg-database ${POSTGRES_DB:-newsreader}
POSTGRES_USER: &pg-user ${POSTGRES_USER:-newsreader}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-newsreader}
x-django-build-env: &django-build-env
<<: *db-env
DJANGO_SECRET_KEY:
DJANGO_SETTINGS_MODULE:
x-db-env: &db-env
<<: *db-connection-env
PGUSER: *pg-user
PGDATABASE: *pg-database
x-django-env: &django-env
<<: *django-build-env
<<: *db-connection-env
ALLOWED_HOSTS: ${ALLOWED_HOSTS:-localhost,127.0.0.1,django}
INTERNAL_IPS: ${INTERNAL_IPS:-localhost,127.0.0.1,django}
# see token_urlsafe from python's secret module to generate one
DJANGO_SECRET_KEY: ${DJANGO_SECRET_KEY:-Ojg68lYsP3kq2r5JgozUzKVSRFywm17BTMS5iwpLM44}
DJANGO_SETTINGS_MODULE: ${DJANGO_SETTINGS_MODULE:-newsreader.conf.production}
ADMINS: ${ADMINS:-""}
VERSION: ${VERSION:-""}
# Email
EMAIL_HOST:
EMAIL_PORT:
EMAIL_HOST_USER:
EMAIL_HOST_PASSWORD:
EMAIL_USE_TLS:
EMAIL_USE_SSL:
EMAIL_DEFAULT_FROM:
# Reddit
REDDIT_CLIENT_ID:
REDDIT_CLIENT_SECRET:
REDDIT_CALLBACK_URL:
EMAIL_HOST: ${EMAIL_HOST:-localhost}
EMAIL_PORT: ${EMAIL_PORT:-25}
EMAIL_HOST_USER: ${EMAIL_HOST_USER:-""}
EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD:-""}
EMAIL_USE_TLS: ${EMAIL_USE_TLS:-no}
EMAIL_USE_SSL: ${EMAIL_USE_SSL:-no}
EMAIL_DEFAULT_FROM: ${EMAIL_DEFAULT_FROM:-webmaster@localhost}
# Sentry
SENTRY_DSN:
SENTRY_DSN: ${SENTRY_DSN:-""}
services:
db:
@@ -42,8 +48,8 @@ services:
<<: *db-env
image: postgres:15
healthcheck:
# Note that --env-file should be used to set these correctly
test: /usr/bin/pg_isready --username="${POSTGRES_USER}" --dbname="${POSTGRES_DB}"
test: /usr/bin/pg_isready
start_period: 10s
interval: 5s
timeout: 10s
retries: 10
@@ -51,7 +57,7 @@ services:
- postgres-data:/var/lib/postgresql/data
rabbitmq:
image: rabbitmq:3.12
image: rabbitmq:4
memcached:
image: memcached:1.6
@@ -59,56 +65,26 @@ services:
- memcached
- -m 64
celery:
build:
context: .
dockerfile: ./docker/django
target: production
args:
<<: *django-build-env
environment:
<<: *django-env
command: |
/app/.venv/bin/celery --app newsreader
--workdir /app/src/
worker --loglevel INFO
--concurrency 2
--beat
--scheduler django
-n worker1@%h
-n worker2@%h
depends_on:
rabbitmq:
condition: service_started
memcached:
condition: service_started
db:
condition: service_healthy
django:
condition: service_healthy
volumes:
- logs:/app/logs
django:
build:
build: &app-build
context: .
dockerfile: ./docker/django
target: production
args:
<<: *django-build-env
environment:
<<: *django-env
entrypoint: /app/bin/docker-entrypoint.sh
entrypoint: ["/bin/sh", "/app/bin/docker-entrypoint.sh"]
command: |
/app/.venv/bin/gunicorn --bind 0.0.0.0:8000
uv run --no-sync --
gunicorn
--bind 0.0.0.0:8000
--workers 3
--chdir /app/src/
newsreader.wsgi:application
healthcheck:
test: /usr/bin/curl --fail http://django:8000 || exit 1
interval: 30s
start_period: 10s
interval: 10s
timeout: 10s
retries: 10
retries: 5
depends_on:
memcached:
condition: service_started
@@ -118,3 +94,33 @@ services:
- logs:/app/logs
- media:/app/media
- static-files:/app/static
celery:
build:
<<: *app-build
environment:
<<: *django-env
command: |
uv run --no-sync --
celery
--app newsreader
--workdir /app/src/
worker --loglevel INFO
--concurrency 2
--beat
--scheduler django
-n worker1@%h
-n worker2@%h
healthcheck:
test: uv run --no-sync -- celery --app newsreader status || exit 1
start_period: 10s
interval: 10s
timeout: 10s
retries: 5
depends_on:
rabbitmq:
condition: service_started
django:
condition: service_healthy
volumes:
- logs:/app/logs
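
The docker-compose.yml diff above ships a placeholder DJANGO_SECRET_KEY and its comment points at token_urlsafe from Python's secrets module for generating a real one. A minimal sketch of that suggestion (the script name is made up for illustration):

# generate_secret_key.py -- illustrative only, not a file in this compare
import secrets

# 32 random URL-safe bytes (roughly 43 characters) is ample entropy for Django.
print(secrets.token_urlsafe(32))

Run it once and put the output in the env file passed via --env-file, so the default baked into docker-compose.yml never reaches production.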


@@ -1,102 +0,0 @@
# stage 1
FROM python:3.11-bookworm AS backend
RUN apt-get update && apt-get install --yes --no-install-recommends \
vim \
curl \
gettext \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
RUN mkdir /app/src
RUN mkdir /app/logs
RUN mkdir /app/media
COPY uv.lock pyproject.toml /app/
COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv
RUN uv sync --frozen --no-install-project
# stage 2
FROM node:lts AS frontend-build
WORKDIR /app
COPY ./*.json ./*.js ./babel.config.js /app/
RUN npm ci
COPY ./src /app/src
RUN npm run build:prod
# stage 3
FROM python:3.11-bookworm AS production
RUN apt-get update && apt-get install --yes --no-install-recommends \
postgresql-client \
vim \
curl \
gettext \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
RUN mkdir /app/logs
RUN mkdir /app/media
RUN mkdir /app/bin
COPY --from=backend /app/.venv /app/.venv
COPY --from=backend /app/uv.lock /app/pyproject.toml /app/
COPY --from=backend /bin/uv /bin/uv
COPY ./bin/docker-entrypoint.sh /app/bin/docker-entrypoint.sh
COPY --from=frontend-build /app/src/newsreader/static /app/src/newsreader/static
COPY ./src /app/src
RUN uv sync --frozen --extra production
RUN useradd -M -u 1000 newsreader
RUN chown -R newsreader:newsreader /app
USER newsreader
ARG POSTGRES_HOST
ARG POSTGRES_PORT
ARG POSTGRES_DB
ARG POSTGRES_USER
ARG POSTGRES_PASSWORD
ARG DJANGO_SECRET_KEY
ARG DJANGO_SETTINGS_MODULE
# Note that the static volume will have to be recreated to be pre-populated
# correctly with the latest static files. See
# https://docs.docker.com/storage/volumes/#populate-a-volume-using-a-container
RUN /app/.venv/bin/python src/manage.py collectstatic --noinput
# (optional) stage 4
FROM python:3.11-bookworm AS development
RUN apt-get update && apt-get install --yes --no-install-recommends \
vim \
curl \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
RUN mkdir /app/logs
RUN mkdir /app/media
RUN mkdir /app/bin
COPY --from=backend /app/.venv /app/.venv
COPY --from=backend /app/uv.lock /app/pyproject.toml /app/
COPY ./bin/docker-entrypoint.sh /app/bin/docker-entrypoint.sh
COPY --from=backend /app/src/ /app/src/
COPY --from=backend /bin/uv /bin/uv
RUN uv sync --frozen --extra testing --extra development


@@ -1,10 +0,0 @@
FROM node:lts
WORKDIR /app
RUN mkdir /app/src
COPY package*.json webpack.*.js babel.config.js /app/
RUN npm install
COPY ./src /app/src


@@ -1,7 +0,0 @@
static:
stage: build
image: node:lts
before_script:
- npm install
script:
- npm run build


@@ -1,25 +0,0 @@
python-linting:
stage: lint
image: python:3.11
before_script:
- pip install uv
- uv sync --extra testing --extra ci
script:
- ./.venv/bin/ruff check src/
- ./.venv/bin/ruff format --check src/
only:
refs:
- main
- merge_requests
javascript-linting:
stage: lint
image: node:lts
before_script:
- npm install
script:
- npm run lint
only:
refs:
- main
- merge_requests


@@ -1,12 +0,0 @@
release:
stage: release
image: registry.gitlab.com/gitlab-org/release-cli:latest
rules:
- if: $CI_COMMIT_TAG
script:
- echo 'running release job'
release:
name: 'Release $CI_COMMIT_TAG'
description: './CHANGELOG.md'
tag_name: '$CI_COMMIT_TAG'
ref: '$CI_COMMIT_TAG'


@@ -1,20 +0,0 @@
python-tests:
stage: test
coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
services:
- postgres:15
- memcached:1.5.22
image: python:3.11
before_script:
- pip install uv
- uv sync --extra testing --extra ci
script:
- ./.venv/bin/coverage run ./src/manage.py test newsreader
javascript-tests:
stage: test
image: node:lts
before_script:
- npm install
script:
- npm test


@@ -1,6 +0,0 @@
module.exports = {
roots: ['src/newsreader/js/tests/'],
clearMocks: true,
coverageDirectory: 'coverage',
};

package-lock.json generated

@@ -1,12 +1,12 @@
{
"name": "newsreader",
"version": "0.4.4",
"version": "0.5.3",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "newsreader",
"version": "0.4.4",
"version": "0.5.3",
"license": "GPL-3.0-or-later",
"dependencies": {
"@fortawesome/fontawesome-free": "^5.15.2",


@@ -1,20 +1,19 @@
{
"name": "newsreader",
"version": "0.4.4",
"version": "0.5.3",
"description": "Application for viewing RSS feeds",
"main": "index.js",
"scripts": {
"lint": "npx prettier \"src/newsreader/js/**/*.js\" --check",
"format": "npx prettier \"src/newsreader/js/**/*.js\" --write",
"build": "npx webpack --config webpack.dev.babel.js",
"build:watch": "npx webpack --config webpack.dev.babel.js --watch",
"build:dev": "npx webpack --config webpack.dev.babel.js",
"build:prod": "npx webpack --config webpack.prod.babel.js",
"test": "npx jest",
"test:watch": "npm test -- --watch"
},
"repository": {
"type": "git",
"url": "[git@git.fudiggity.nl:5000]:sonny/newsreader.git"
"url": "forgejo.fudiggity.nl:sonny/newsreader"
},
"author": "Sonny",
"license": "GPL-3.0-or-later",
@@ -55,5 +54,22 @@
"webpack": "^5.94.0",
"webpack-cli": "^5.1.4",
"webpack-merge": "^4.2.2"
},
"prettier": {
"semi": true,
"trailingComma": "es5",
"singleQuote": true,
"printWidth": 90,
"tabWidth": 2,
"useTabs": false,
"bracketSpacing": true,
"arrowParens": "avoid"
},
"jest": {
"roots": [
"src/newsreader/js/tests/"
],
"clearMocks": true,
"coverageDirectory": "coverage"
}
}


@@ -1,58 +1,81 @@
[project]
name = 'newsreader'
version = '0.4.4'
authors = [{name = 'Sonny', email= 'sonnyba871@gmail.com'}]
license = {text = 'GPL-3.0'}
requires-python = '>=3.11'
name = "newsreader"
version = "0.5.3"
authors = [{ name = "Sonny" }]
license = { text = "GPL-3.0" }
requires-python = ">=3.11"
dependencies = [
'django~=4.2',
'celery~=5.4',
'psycopg',
'django-axes',
'django-celery-beat~=2.7.0',
'django-registration-redux~=2.7',
'django-rest-framework',
'djangorestframework-camel-case',
'pymemcache',
'python-dotenv~=1.0.1',
'ftfy~=6.2',
'requests',
'requests_oauthlib',
'feedparser',
'bleach',
'beautifulsoup4',
'lxml',
"django~=4.2",
"celery~=5.4",
"psycopg[binary]",
"django-axes",
"django-celery-beat~=2.7.0",
"django-rest-framework",
"djangorestframework-camel-case",
"pymemcache",
"python-dotenv~=1.0.1",
"ftfy~=6.2",
"requests",
"feedparser",
"bleach",
"beautifulsoup4",
"lxml",
]
[project.optional-dependencies]
testing = [
'factory-boy',
'freezegun',
'tblib',
"ruff>=0.6.3",
]
[dependency-groups]
test-tools = ["ruff", "factory_boy", "freezegun"]
development = [
'django-debug-toolbar',
'django-stubs',
'django-extensions',
"django-debug-toolbar",
"django-stubs",
"django-extensions",
]
ci = ['coverage~=7.6.1']
production = ['gunicorn~=23.0', 'sentry-sdk~=2.0']
ci = ["coverage~=7.6.1"]
production = ["gunicorn~=23.0"]
[project.optional-dependencies]
sentry = ["sentry-sdk~=2.0"]
[tool.uv]
environments = ["sys_platform == 'linux'"]
default-groups = ["test-tools"]
[tool.ruff]
include = ['pyproject.toml', 'src/**/*.py']
include = ["pyproject.toml", "src/**/*.py"]
line-length = 88
[tool.ruff.lint.isort]
default-section = 'third-party'
known-first-party = ['newsreader']
[tool.ruff.lint]
select = ["E4", "E7", "E9", "F", "I"]
[tool.ruff.lint.isort]
lines-between-types=1
lines-after-imports=2
default-section = "third-party"
known-first-party = ["newsreader"]
section-order = [
"future",
"standard-library",
"django",
"third-party",
"first-party",
"local-folder",
]
[tool.ruff.lint.isort.sections]
django = ['django']
django = ["django"]
[tool.coverage.run]
source = ["./src/newsreader/"]
omit = [
"**/tests/**",
"**/migrations/**",
"**/conf/**",
"**/apps.py",
"**/admin.py",
"**/tests.py",
"**/urls.py",
"**/wsgi.py",
"**/celery.py",
"**/__init__.py"
]


@@ -11,12 +11,6 @@ class UserAdminForm(UserChangeForm):
class Meta:
widgets = {
"email": forms.EmailInput(attrs={"size": "50"}),
"reddit_access_token": forms.PasswordInput(
attrs={"size": "90"}, render_value=True
),
"reddit_refresh_token": forms.PasswordInput(
attrs={"size": "90"}, render_value=True
),
}
@@ -34,10 +28,6 @@ class UserAdmin(DjangoUserAdmin):
_("User settings"),
{"fields": ("email", "password", "first_name", "last_name", "is_active")},
),
(
_("Reddit settings"),
{"fields": ("reddit_access_token", "reddit_refresh_token")},
),
(
_("Permission settings"),
{"classes": ("collapse",), "fields": ("is_staff", "is_superuser")},


@@ -0,0 +1,20 @@
# Generated by Django 4.2.16 on 2025-03-26 08:46
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("accounts", "0017_auto_20240906_0914"),
]
operations = [
migrations.RemoveField(
model_name="user",
name="reddit_access_token",
),
migrations.RemoveField(
model_name="user",
name="reddit_refresh_token",
),
]


@@ -39,10 +39,6 @@ class UserManager(DjangoUserManager):
class User(AbstractUser):
email = models.EmailField(_("email address"), unique=True)
# reddit settings
reddit_refresh_token = models.CharField(max_length=255, blank=True, null=True)
reddit_access_token = models.CharField(max_length=255, blank=True, null=True)
# settings
auto_mark_read = models.BooleanField(
_("Auto read marking"),


@@ -2,27 +2,23 @@
{% load i18n %}
{% block actions %}
<section class="section form__section--last">
<fieldset class="fieldset form__fieldset">
{% include "components/form/confirm-button.html" %}
<section class="section form__section--last">
<fieldset class="fieldset form__fieldset">
{% include "components/form/confirm-button.html" %}
<a class="link button button--primary" href="{% url 'accounts:password-change' %}">
{% trans "Change password" %}
</a>
<a class="link button button--primary" href="{% url 'accounts:password-change' %}">
{% trans "Change password" %}
</a>
{% if favicon_task_allowed %}
<a class="link button button--primary" href="{% url 'accounts:settings:favicon' %}">
{% trans "Fetch favicons" %}
</a>
{% else %}
<button class="button button--primary button--disabled" disabled>
{% trans "Fetch favicons" %}
</button>
{% endif %}
<a class="link button button--primary" href="{% url 'accounts:settings:integrations' %}">
{% trans "Third party integrations" %}
</a>
</fieldset>
</section>
{% if favicon_task_allowed %}
<a class="link button button--primary" href="{% url 'accounts:settings:favicon' %}">
{% trans "Fetch favicons" %}
</a>
{% else %}
<button class="button button--primary button--disabled" disabled>
{% trans "Fetch favicons" %}
</button>
{% endif %}
</fieldset>
</section>
{% endblock actions %}


@@ -1,47 +0,0 @@
{% extends "sidebar.html" %}
{% load i18n %}
{% block content %}
<main id="integrations--page" class="main" data-render-sidebar=true>
<div class="main__container">
<section class="section">
{% include "components/header/header.html" with title="Integrations" only %}
<div class="integrations">
<h3 class="integrations__title">Reddit</h3>
<div class="integrations__controls">
{% if reddit_authorization_url %}
<a class="link button button--reddit" href="{{ reddit_authorization_url }}">
{% trans "Authorize account" %}
</a>
{% else %}
<button class="button button--reddit button--disabled" disabled>
{% trans "Authorize account" %}
</button>
{% endif %}
{% if reddit_refresh_url %}
<a class="link button button--reddit" href="{{ reddit_refresh_url }}">
{% trans "Refresh token" %}
</a>
{% else %}
<button class="button button--reddit button--disabled" disabled>
{% trans "Refresh token" %}
</button>
{% endif %}
{% if reddit_revoke_url %}
<a class="link button button--reddit" href="{{ reddit_revoke_url }}">
{% trans "Deauthorize account" %}
</a>
{% else %}
<button class="button button--reddit button--disabled" disabled>
{% trans "Deauthorize account" %}
</button>
{% endif %}
</div>
</div>
</section>
</div>
</main>
{% endblock %}


@@ -1,22 +0,0 @@
{% extends "sidebar.html" %}
{% load i18n %}
{% block content %}
<main id="reddit--page" class="main" data-render-sidebar=true>
<div class="main__container">
<section class="section text-section">
{% if error %}
<h1 class="h1">{% trans "Reddit authorization failed" %}</h1>
<p>{{ error }}</p>
{% elif access_token and refresh_token %}
<h1 class="h1">{% trans "Reddit account is linked" %}</h1>
<p>{% trans "Your reddit account was successfully linked." %}</p>
{% endif %}
<p>
<a class="link" href="{% url 'accounts:settings:integrations' %}">{% trans "Return to integrations page" %}</a>
</p>
</section>
</div>
</main>
{% endblock %}


@@ -5,8 +5,6 @@ from django.utils.crypto import get_random_string
import factory
from registration.models import RegistrationProfile
from newsreader.accounts.models import User
@@ -29,11 +27,3 @@ class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = User
class RegistrationProfileFactory(factory.django.DjangoModelFactory):
user = factory.SubFactory(UserFactory)
activation_key = factory.LazyFunction(get_activation_key)
class Meta:
model = RegistrationProfile


@@ -1,99 +0,0 @@
import datetime
from django.conf import settings
from django.test import TestCase
from django.urls import reverse
from django.utils.translation import gettext as _
from registration.models import RegistrationProfile
from newsreader.accounts.models import User
class ActivationTestCase(TestCase):
def setUp(self):
self.register_url = reverse("accounts:register")
self.register_success_url = reverse("accounts:register-complete")
self.success_url = reverse("accounts:activate-complete")
def test_activation(self):
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.register_url, data)
self.assertRedirects(response, self.register_success_url)
register_profile = RegistrationProfile.objects.get()
kwargs = {"activation_key": register_profile.activation_key}
response = self.client.get(reverse("accounts:activate", kwargs=kwargs))
self.assertRedirects(response, self.success_url)
def test_expired_key(self):
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.register_url, data)
register_profile = RegistrationProfile.objects.get()
user = register_profile.user
user.date_joined -= datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
user.save()
kwargs = {"activation_key": register_profile.activation_key}
response = self.client.get(reverse("accounts:activate", kwargs=kwargs))
self.assertEqual(200, response.status_code)
self.assertContains(response, _("Account activation failed"))
user.refresh_from_db()
self.assertFalse(user.is_active)
def test_invalid_key(self):
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.register_url, data)
self.assertRedirects(response, self.register_success_url)
kwargs = {"activation_key": "not-a-valid-key"}
response = self.client.get(reverse("accounts:activate", kwargs=kwargs))
self.assertContains(response, _("Account activation failed"))
user = User.objects.get()
self.assertEquals(user.is_active, False)
def test_activated_key(self):
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.register_url, data)
self.assertRedirects(response, self.register_success_url)
register_profile = RegistrationProfile.objects.get()
kwargs = {"activation_key": register_profile.activation_key}
response = self.client.get(reverse("accounts:activate", kwargs=kwargs))
self.assertRedirects(response, self.success_url)
# try this a second time
response = self.client.get(reverse("accounts:activate", kwargs=kwargs))
self.assertRedirects(response, self.success_url)


@@ -1,275 +0,0 @@
from unittest.mock import patch
from urllib.parse import urlencode
from uuid import uuid4
from django.core.cache import cache
from django.test import TestCase
from django.urls import reverse
from bs4 import BeautifulSoup
from newsreader.accounts.tests.factories import UserFactory
from newsreader.news.collection.exceptions import (
StreamException,
StreamTooManyException,
)
class IntegrationsViewTestCase(TestCase):
def setUp(self):
self.user = UserFactory(email="test@test.nl", password="test")
self.client.force_login(self.user)
self.url = reverse("accounts:settings:integrations")
class RedditIntegrationsTestCase(IntegrationsViewTestCase):
def test_reddit_authorization(self):
self.user.reddit_refresh_token = None
self.user.save()
response = self.client.get(self.url)
soup = BeautifulSoup(response.content, features="lxml")
button = soup.find("a", class_="link button button--reddit")
self.assertEquals(button.text.strip(), "Authorize account")
def test_reddit_refresh_token(self):
self.user.reddit_refresh_token = "jadajadajada"
self.user.reddit_access_token = None
self.user.save()
response = self.client.get(self.url)
soup = BeautifulSoup(response.content, features="lxml")
button = soup.find("a", class_="link button button--reddit")
self.assertEquals(button.text.strip(), "Refresh token")
def test_reddit_revoke(self):
self.user.reddit_refresh_token = "jadajadajada"
self.user.reddit_access_token = None
self.user.save()
response = self.client.get(self.url)
soup = BeautifulSoup(response.content, features="lxml")
buttons = soup.find_all("a", class_="link button button--reddit")
self.assertIn(
"Deauthorize account", [button.text.strip() for button in buttons]
)
class RedditTemplateViewTestCase(TestCase):
def setUp(self):
self.user = UserFactory(email="test@test.nl", password="test")
self.client.force_login(self.user)
self.base_url = reverse("accounts:settings:reddit-template")
self.state = str(uuid4())
self.patch = patch("newsreader.news.collection.reddit.post")
self.mocked_post = self.patch.start()
def tearDown(self):
patch.stopall()
def test_simple(self):
response = self.client.get(self.base_url)
self.assertEquals(response.status_code, 200)
self.assertContains(response, "Return to integrations page")
def test_successful_authorization(self):
self.mocked_post.return_value.json.return_value = {
"access_token": "1001010412",
"refresh_token": "134510143",
}
cache.set(f"{self.user.email}-reddit-auth", self.state)
params = {"state": self.state, "code": "Valid code"}
url = f"{self.base_url}?{urlencode(params)}"
response = self.client.get(url)
self.mocked_post.assert_called_once()
self.assertEquals(response.status_code, 200)
self.assertContains(response, "Your reddit account was successfully linked.")
self.user.refresh_from_db()
self.assertEquals(self.user.reddit_access_token, "1001010412")
self.assertEquals(self.user.reddit_refresh_token, "134510143")
self.assertEquals(cache.get(f"{self.user.email}-reddit-auth"), None)
def test_error(self):
params = {"error": "Denied authorization"}
url = f"{self.base_url}?{urlencode(params)}"
response = self.client.get(url)
self.assertEquals(response.status_code, 200)
self.assertContains(response, "Denied authorization")
def test_invalid_state(self):
cache.set(f"{self.user.email}-reddit-auth", str(uuid4()))
params = {"code": "Valid code", "state": "Invalid state"}
url = f"{self.base_url}?{urlencode(params)}"
response = self.client.get(url)
self.assertEquals(response.status_code, 200)
self.assertContains(
response, "The saved state for Reddit authorization did not match"
)
def test_stream_error(self):
self.mocked_post.side_effect = StreamTooManyException
cache.set(f"{self.user.email}-reddit-auth", self.state)
params = {"state": self.state, "code": "Valid code"}
url = f"{self.base_url}?{urlencode(params)}"
response = self.client.get(url)
self.mocked_post.assert_called_once()
self.assertEquals(response.status_code, 200)
self.assertContains(response, "Too many requests")
self.user.refresh_from_db()
self.assertEquals(self.user.reddit_access_token, None)
self.assertEquals(self.user.reddit_refresh_token, None)
self.assertEquals(cache.get(f"{self.user.email}-reddit-auth"), self.state)
def test_unexpected_json(self):
self.mocked_post.return_value.json.return_value = {"message": "Happy eastern"}
cache.set(f"{self.user.email}-reddit-auth", self.state)
params = {"state": self.state, "code": "Valid code"}
url = f"{self.base_url}?{urlencode(params)}"
response = self.client.get(url)
self.mocked_post.assert_called_once()
self.assertEquals(response.status_code, 200)
self.assertContains(response, "Access and refresh token not found in response")
self.user.refresh_from_db()
self.assertEquals(self.user.reddit_access_token, None)
self.assertEquals(self.user.reddit_refresh_token, None)
self.assertEquals(cache.get(f"{self.user.email}-reddit-auth"), self.state)
class RedditTokenRedirectViewTestCase(TestCase):
def setUp(self):
self.user = UserFactory(email="test@test.nl", password="test")
self.client.force_login(self.user)
self.patch = patch("newsreader.accounts.views.integrations.RedditTokenTask")
self.mocked_task = self.patch.start()
def tearDown(self):
cache.clear()
def test_simple(self):
response = self.client.get(reverse("accounts:settings:reddit-refresh"))
self.assertRedirects(response, reverse("accounts:settings:integrations"))
self.mocked_task.delay.assert_called_once_with(self.user.pk)
self.assertEquals(1, cache.get(f"{self.user.email}-reddit-refresh"))
def test_not_active(self):
cache.set(f"{self.user.email}-reddit-refresh", 1)
response = self.client.get(reverse("accounts:settings:reddit-refresh"))
self.assertRedirects(response, reverse("accounts:settings:integrations"))
self.mocked_task.delay.assert_not_called()
class RedditRevokeRedirectViewTestCase(TestCase):
def setUp(self):
self.user = UserFactory(email="test@test.nl", password="test")
self.client.force_login(self.user)
self.patch = patch("newsreader.accounts.views.integrations.revoke_reddit_token")
self.mocked_revoke = self.patch.start()
def test_simple(self):
self.user.reddit_access_token = "jadajadajada"
self.user.reddit_refresh_token = "jadajadajada"
self.user.save()
self.mocked_revoke.return_value = True
response = self.client.get(reverse("accounts:settings:reddit-revoke"))
self.assertRedirects(response, reverse("accounts:settings:integrations"))
self.mocked_revoke.assert_called_once_with(self.user)
self.user.refresh_from_db()
self.assertEquals(self.user.reddit_access_token, None)
self.assertEquals(self.user.reddit_refresh_token, None)
def test_no_refresh_token(self):
self.user.reddit_refresh_token = None
self.user.save()
response = self.client.get(reverse("accounts:settings:reddit-revoke"))
self.assertRedirects(response, reverse("accounts:settings:integrations"))
self.mocked_revoke.assert_not_called()
def test_unsuccessful_response(self):
self.user.reddit_access_token = "jadajadajada"
self.user.reddit_refresh_token = "jadajadajada"
self.user.save()
self.mocked_revoke.return_value = False
response = self.client.get(reverse("accounts:settings:reddit-revoke"))
self.assertRedirects(response, reverse("accounts:settings:integrations"))
self.user.refresh_from_db()
self.assertEquals(self.user.reddit_access_token, "jadajadajada")
self.assertEquals(self.user.reddit_refresh_token, "jadajadajada")
def test_stream_exception(self):
self.user.reddit_access_token = "jadajadajada"
self.user.reddit_refresh_token = "jadajadajada"
self.user.save()
self.mocked_revoke.side_effect = StreamException
response = self.client.get(reverse("accounts:settings:reddit-revoke"))
self.assertRedirects(response, reverse("accounts:settings:integrations"))
self.user.refresh_from_db()
self.assertEquals(self.user.reddit_access_token, "jadajadajada")
self.assertEquals(self.user.reddit_refresh_token, "jadajadajada")


@@ -1,110 +0,0 @@
from django.core import mail
from django.test import TransactionTestCase as TestCase
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.translation import gettext as _
from registration.models import RegistrationProfile
from newsreader.accounts.models import User
from newsreader.accounts.tests.factories import UserFactory
class RegistrationTestCase(TestCase):
def setUp(self):
self.url = reverse("accounts:register")
self.success_url = reverse("accounts:register-complete")
self.disallowed_url = reverse("accounts:register-closed")
def test_simple(self):
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
def test_registration(self):
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.url, data)
self.assertRedirects(response, self.success_url)
self.assertEquals(User.objects.count(), 1)
self.assertEquals(RegistrationProfile.objects.count(), 1)
user = User.objects.get()
self.assertEquals(user.is_active, False)
self.assertEquals(len(mail.outbox), 1)
def test_existing_email(self):
UserFactory(email="test@test.com")
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.url, data)
self.assertEquals(response.status_code, 200)
self.assertEquals(User.objects.count(), 1)
self.assertContains(response, _("User with this Email address already exists"))
def test_pending_registration(self):
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.url, data)
self.assertRedirects(response, self.success_url)
self.assertEquals(User.objects.count(), 1)
self.assertEquals(RegistrationProfile.objects.count(), 1)
user = User.objects.get()
self.assertEquals(user.is_active, False)
self.assertEquals(len(mail.outbox), 1)
response = self.client.post(self.url, data)
self.assertEquals(response.status_code, 200)
self.assertContains(response, _("User with this Email address already exists"))
def test_disabled_account(self):
UserFactory(email="test@test.com", is_active=False)
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.url, data)
self.assertEquals(response.status_code, 200)
self.assertEquals(User.objects.count(), 1)
self.assertContains(response, _("User with this Email address already exists"))
@override_settings(REGISTRATION_OPEN=False)
def test_registration_closed(self):
response = self.client.get(self.url)
self.assertRedirects(response, self.disallowed_url)
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.url, data)
self.assertRedirects(response, self.disallowed_url)
self.assertEquals(User.objects.count(), 0)
self.assertEquals(RegistrationProfile.objects.count(), 0)


@@ -1,77 +0,0 @@
from django.core import mail
from django.test import TransactionTestCase as TestCase
from django.urls import reverse
from django.utils.translation import gettext as _
from registration.models import RegistrationProfile
from newsreader.accounts.tests.factories import RegistrationProfileFactory, UserFactory
class ResendActivationTestCase(TestCase):
def setUp(self):
self.url = reverse("accounts:activate-resend")
self.success_url = reverse("accounts:activate-complete")
self.register_url = reverse("accounts:register")
def test_simple(self):
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
def test_resent_form(self):
data = {
"email": "test@test.com",
"password1": "test12456",
"password2": "test12456",
}
response = self.client.post(self.register_url, data)
register_profile = RegistrationProfile.objects.get()
original_kwargs = {"activation_key": register_profile.activation_key}
response = self.client.post(self.url, {"email": "test@test.com"})
self.assertContains(response, _("We have sent an email to"))
self.assertEquals(len(mail.outbox), 2)
register_profile.refresh_from_db()
kwargs = {"activation_key": register_profile.activation_key}
response = self.client.get(reverse("accounts:activate", kwargs=kwargs))
self.assertRedirects(response, self.success_url)
register_profile.refresh_from_db()
user = register_profile.user
self.assertEquals(user.is_active, True)
# test the old activation code
response = self.client.get(reverse("accounts:activate", kwargs=original_kwargs))
self.assertEquals(response.status_code, 200)
self.assertContains(response, _("Account activation failed"))
def test_existing_account(self):
user = UserFactory(is_active=True)
RegistrationProfileFactory(user=user, activated=True)
response = self.client.post(self.url, {"email": user.email})
self.assertEquals(response.status_code, 200)
# default behaviour is to show success page but not send an email
self.assertContains(response, _("We have sent an email to"))
self.assertEquals(len(mail.outbox), 0)
def test_no_account(self):
response = self.client.post(self.url, {"email": "fake@mail.com"})
self.assertEquals(response.status_code, 200)
# default behaviour is to show success page but not send an email
self.assertContains(response, _("We have sent an email to"))
self.assertEquals(len(mail.outbox), 0)


@@ -2,11 +2,7 @@ from django.contrib.auth.decorators import login_required
from django.urls import include, path
from newsreader.accounts.views import (
ActivationCompleteView,
ActivationResendView,
ActivationView,
FaviconRedirectView,
IntegrationsView,
LoginView,
LogoutView,
PasswordChangeView,
@@ -14,36 +10,11 @@ from newsreader.accounts.views import (
PasswordResetConfirmView,
PasswordResetDoneView,
PasswordResetView,
RedditRevokeRedirectView,
RedditTemplateView,
RedditTokenRedirectView,
RegistrationClosedView,
RegistrationCompleteView,
RegistrationView,
SettingsView,
)
settings_patterns = [
# Integrations
path(
"integrations/reddit/callback/",
login_required(RedditTemplateView.as_view()),
name="reddit-template",
),
path(
"integrations/reddit/refresh/",
login_required(RedditTokenRedirectView.as_view()),
name="reddit-refresh",
),
path(
"integrations/reddit/revoke/",
login_required(RedditRevokeRedirectView.as_view()),
name="reddit-revoke",
),
path(
"integrations/", login_required(IntegrationsView.as_view()), name="integrations"
),
# Misc
path("favicon/", login_required(FaviconRedirectView.as_view()), name="favicon"),
path("", login_required(SettingsView.as_view()), name="home"),
@@ -53,24 +24,6 @@ urlpatterns = [
# Auth
path("login/", LoginView.as_view(), name="login"),
path("logout/", LogoutView.as_view(), name="logout"),
# Register
path("register/", RegistrationView.as_view(), name="register"),
path(
"register/complete/",
RegistrationCompleteView.as_view(),
name="register-complete",
),
path("register/closed/", RegistrationClosedView.as_view(), name="register-closed"),
path(
"activate/complete/", ActivationCompleteView.as_view(), name="activate-complete"
),
path("activate/resend/", ActivationResendView.as_view(), name="activate-resend"),
path(
# This URL should be placed after all activate/ url's (see arg)
"activate/<str:activation_key>/",
ActivationView.as_view(),
name="activate",
),
# Password
path("password-reset/", PasswordResetView.as_view(), name="password-reset"),
path(


@@ -1,11 +1,5 @@
from newsreader.accounts.views.auth import LoginView, LogoutView
from newsreader.accounts.views.favicon import FaviconRedirectView
from newsreader.accounts.views.integrations import (
IntegrationsView,
RedditRevokeRedirectView,
RedditTemplateView,
RedditTokenRedirectView,
)
from newsreader.accounts.views.password import (
PasswordChangeView,
PasswordResetCompleteView,
@@ -13,34 +7,17 @@ from newsreader.accounts.views.password import (
PasswordResetDoneView,
PasswordResetView,
)
from newsreader.accounts.views.registration import (
ActivationCompleteView,
ActivationResendView,
ActivationView,
RegistrationClosedView,
RegistrationCompleteView,
RegistrationView,
)
from newsreader.accounts.views.settings import SettingsView
__all__ = [
"LoginView",
"LogoutView",
"FaviconRedirectView",
"IntegrationsView",
"RedditRevokeRedirectView",
"RedditTemplateView",
"RedditTokenRedirectView",
"PasswordChangeView",
"PasswordResetCompleteView",
"PasswordResetConfirmView",
"PasswordResetDoneView",
"PasswordResetView",
"ActivationCompleteView",
"ActivationResendView",
"ActivationView",
"RegistrationClosedView",
"RegistrationCompleteView",
"RegistrationView",
"SettingsView",
]


@@ -1,156 +0,0 @@
import logging
from django.contrib import messages
from django.core.cache import cache
from django.urls import reverse_lazy
from django.utils.translation import gettext as _
from django.views.generic import RedirectView, TemplateView
from newsreader.news.collection.exceptions import StreamException
from newsreader.news.collection.reddit import (
get_reddit_access_token,
get_reddit_authorization_url,
revoke_reddit_token,
)
from newsreader.news.collection.tasks import RedditTokenTask
from newsreader.utils.views import NavListMixin
logger = logging.getLogger(__name__)
class IntegrationsView(NavListMixin, TemplateView):
template_name = "accounts/views/integrations.html"
def get_context_data(self, **kwargs):
return {
**super().get_context_data(**kwargs),
**self.get_reddit_context(**kwargs),
}
def get_reddit_context(self, **kwargs):
user = self.request.user
reddit_authorization_url = None
reddit_refresh_url = None
reddit_task_active = cache.get(f"{user.email}-reddit-refresh")
if (
user.reddit_refresh_token
and not user.reddit_access_token
and not reddit_task_active
):
reddit_refresh_url = reverse_lazy("accounts:settings:reddit-refresh")
if not user.reddit_refresh_token:
reddit_authorization_url = get_reddit_authorization_url(user)
return {
"reddit_authorization_url": reddit_authorization_url,
"reddit_refresh_url": reddit_refresh_url,
"reddit_revoke_url": (
reverse_lazy("accounts:settings:reddit-revoke")
if not reddit_authorization_url
else None
),
}
class RedditTemplateView(NavListMixin, TemplateView):
template_name = "accounts/views/reddit.html"
def get(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
error = request.GET.get("error", None)
state = request.GET.get("state", None)
code = request.GET.get("code", None)
if error:
return self.render_to_response({**context, "error": error})
if not code or not state:
return self.render_to_response(context)
cached_state = cache.get(f"{request.user.email}-reddit-auth")
if state != cached_state:
return self.render_to_response(
{
**context,
"error": _(
"The saved state for Reddit authorization did not match"
),
}
)
try:
access_token, refresh_token = get_reddit_access_token(code, request.user)
return self.render_to_response(
{
**context,
"access_token": access_token,
"refresh_token": refresh_token,
}
)
except StreamException as e:
return self.render_to_response({**context, "error": str(e)})
except KeyError:
return self.render_to_response(
{
**context,
"error": _("Access and refresh token not found in response"),
}
)
class RedditTokenRedirectView(RedirectView):
url = reverse_lazy("accounts:settings:integrations")
def get(self, request, *args, **kwargs):
response = super().get(request, *args, **kwargs)
user = request.user
task_active = cache.get(f"{user.email}-reddit-refresh")
if not task_active:
RedditTokenTask.delay(user.pk)
messages.success(request, _("Access token is being retrieved"))
cache.set(f"{user.email}-reddit-refresh", 1, 300)
return response
messages.error(request, _("Unable to retrieve token"))
return response
class RedditRevokeRedirectView(RedirectView):
url = reverse_lazy("accounts:settings:integrations")
def get(self, request, *args, **kwargs):
response = super().get(request, *args, **kwargs)
user = request.user
if not user.reddit_refresh_token:
messages.error(request, _("No reddit account is linked to this account"))
return response
try:
is_revoked = revoke_reddit_token(user)
except StreamException:
logger.exception(f"Unable to revoke reddit token for {user.pk}")
messages.error(request, _("Unable to revoke reddit token"))
return response
if not is_revoked:
messages.error(request, _("Unable to revoke reddit token"))
return response
user.reddit_access_token = None
user.reddit_refresh_token = None
user.save()
messages.success(request, _("Reddit account deathorized"))
return response


@@ -1,56 +0,0 @@
from django.shortcuts import render
from django.urls import reverse_lazy
from django.views.generic import TemplateView
from registration.backends.default import views as registration_views
from newsreader.utils.views import NavListMixin
# RegistrationView shows a registration form and sends the email
# RegistrationCompleteView shows after filling in the registration form
# ActivationView is send within the activation email and activates the account
# ActivationCompleteView shows the success screen when activation was succesful
# ActivationResendView can be used when activation links are expired
# RegistrationClosedView shows when registration is disabled
class RegistrationView(NavListMixin, registration_views.RegistrationView):
disallowed_url = reverse_lazy("accounts:register-closed")
template_name = "registration/registration_form.html"
success_url = reverse_lazy("accounts:register-complete")
class RegistrationCompleteView(NavListMixin, TemplateView):
template_name = "registration/registration_complete.html"
class RegistrationClosedView(NavListMixin, TemplateView):
template_name = "registration/registration_closed.html"
# Redirects or renders failed activation template
class ActivationView(NavListMixin, registration_views.ActivationView):
template_name = "registration/activation_failure.html"
def get_success_url(self, user):
return ("accounts:activate-complete", (), {})
class ActivationCompleteView(NavListMixin, TemplateView):
template_name = "registration/activation_complete.html"
# Renders activation form resend or resend_activation_complete
class ActivationResendView(NavListMixin, registration_views.ResendActivationView):
template_name = "registration/activation_resend_form.html"
def render_form_submitted_template(self, form):
"""
Renders resend activation complete template with the submitted email.
"""
email = form.cleaned_data["email"]
context = {"email": email}
return render(
self.request, "registration/activation_resend_complete.html", context
)


@@ -1,9 +1,7 @@
import os
from pathlib import Path
from dotenv import load_dotenv
from newsreader.conf.utils import get_env, get_root_dir
load_dotenv()
@@ -15,16 +13,13 @@ except ImportError:
DjangoIntegration = None
BASE_DIR = Path(__file__).resolve().parent.parent.parent.parent
DJANGO_PROJECT_DIR = os.path.join(BASE_DIR, "src", "newsreader")
BASE_DIR = get_root_dir()
DJANGO_PROJECT_DIR = BASE_DIR / "src" / "newsreader"
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: don"t run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ["127.0.0.1", "localhost"]
INTERNAL_IPS = ["127.0.0.1", "localhost"]
ALLOWED_HOSTS = get_env("ALLOWED_HOSTS", split=",", default=["127.0.0.1", "localhost"])
INTERNAL_IPS = get_env("INTERNAL_IPS", split=",", default=["127.0.0.1", "localhost"])
# Application definition
INSTALLED_APPS = [
@@ -39,7 +34,6 @@ INSTALLED_APPS = [
"rest_framework",
"celery",
"django_celery_beat",
"registration",
"axes",
# app modules
"newsreader.accounts",
@@ -49,7 +43,7 @@ INSTALLED_APPS = [
"newsreader.news.collection",
]
SECRET_KEY = os.environ["DJANGO_SECRET_KEY"]
SECRET_KEY = get_env("DJANGO_SECRET_KEY", default="")
AUTHENTICATION_BACKENDS = [
"axes.backends.AxesBackend",
@@ -74,11 +68,10 @@ FORM_RENDERER = "django.forms.renderers.TemplatesSetting"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(DJANGO_PROJECT_DIR, "templates")],
"DIRS": [DJANGO_PROJECT_DIR / "templates"],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
@@ -89,16 +82,14 @@ TEMPLATES = [
WSGI_APPLICATION = "newsreader.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql",
"HOST": os.environ["POSTGRES_HOST"],
"PORT": os.environ["POSTGRES_PORT"],
"NAME": os.environ["POSTGRES_DB"],
"USER": os.environ["POSTGRES_USER"],
"PASSWORD": os.environ["POSTGRES_PASSWORD"],
"HOST": get_env("POSTGRES_HOST", default=""),
"PORT": get_env("POSTGRES_PORT", default=""),
"NAME": get_env("POSTGRES_DB", default=""),
"USER": get_env("POSTGRES_USER", default=""),
"PASSWORD": get_env("POSTGRES_PASSWORD", default=""),
}
}
@ -115,8 +106,6 @@ CACHES = {
},
}
# Logging
# https://docs.djangoproject.com/en/2.2/topics/logging/#configuring-logging
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
@ -170,8 +159,6 @@ LOGGING = {
},
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
@ -186,8 +173,6 @@ AUTH_USER_MODEL = "accounts.User"
LOGIN_REDIRECT_URL = "/"
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "Europe/Amsterdam"
@ -195,30 +180,31 @@ USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
STATIC_ROOT = os.path.join(BASE_DIR, "static")
STATICFILES_DIRS = [os.path.join(DJANGO_PROJECT_DIR, "static")]
STATIC_ROOT = BASE_DIR / "static"
STATICFILES_DIRS = (DJANGO_PROJECT_DIR / "static",)
# https://docs.djangoproject.com/en/2.2/ref/settings/#std:setting-STATICFILES_FINDERS
STATICFILES_FINDERS = [
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
]
# Email
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
# Reddit integration
REDDIT_CLIENT_ID = "CLIENT_ID"
REDDIT_CLIENT_SECRET = "CLIENT_SECRET"
REDDIT_REDIRECT_URL = (
"http://127.0.0.1:8000/accounts/settings/integrations/reddit/callback/"
DEFAULT_FROM_EMAIL = get_env(
"EMAIL_DEFAULT_FROM", required=False, default="webmaster@localhost"
)
# Twitter integration
TWITTER_URL = "https://twitter.com"
EMAIL_HOST = get_env("EMAIL_HOST", required=False, default="localhost")
EMAIL_PORT = get_env("EMAIL_PORT", cast=int, required=False, default=25)
EMAIL_HOST_USER = get_env("EMAIL_HOST_USER", required=False, default="")
EMAIL_HOST_PASSWORD = get_env("EMAIL_HOST_PASSWORD", required=False, default="")
EMAIL_USE_TLS = get_env("EMAIL_USE_TLS", cast=bool, required=False, default=False)
EMAIL_USE_SSL = get_env("EMAIL_USE_SSL", cast=bool, required=False, default=False)
# Third party settings
AXES_HANDLER = "axes.handlers.cache.AxesCacheHandler"
@ -227,7 +213,6 @@ AXES_FAILURE_LIMIT = 5
AXES_COOLOFF_TIME = 3 # in hours
AXES_RESET_ON_SUCCESS = True
# TODO: verify parser works correctly
REST_FRAMEWORK = {
"DEFAULT_AUTHENTICATION_CLASSES": (
"rest_framework.authentication.SessionAuthentication",
@ -256,13 +241,9 @@ SWAGGER_SETTINGS = {
CELERY_WORKER_HIJACK_ROOT_LOGGER = False
CELERY_BROKER_URL = "amqp://guest@rabbitmq:5672"
REGISTRATION_OPEN = True
REGISTRATION_AUTO_LOGIN = True
ACCOUNT_ACTIVATION_DAYS = 7
# Sentry
SENTRY_CONFIG = {
"dsn": os.environ.get("SENTRY_DSN"),
"dsn": get_env("SENTRY_DSN", default="", required=False),
"send_default_pii": False,
"integrations": [DjangoIntegration(), CeleryIntegration()]
if DjangoIntegration and CeleryIntegration
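For reference, the rewritten base settings read everything from the environment (via load_dotenv and get_env). A minimal sketch of the variables they expect is shown below; the values are purely illustrative, only the variable names come from the hunks above.

import os

# Illustrative values only; the names are the ones read by base.py above.
os.environ.update({
    "DJANGO_SECRET_KEY": "change-me",
    "ALLOWED_HOSTS": "127.0.0.1,localhost",
    "INTERNAL_IPS": "127.0.0.1",
    "POSTGRES_HOST": "localhost",
    "POSTGRES_PORT": "5432",
    "POSTGRES_DB": "newsreader",
    "POSTGRES_USER": "newsreader",
    "POSTGRES_PASSWORD": "secret",
    "EMAIL_HOST": "localhost",
    "EMAIL_PORT": "25",
    "SENTRY_DSN": "",
})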

View file

@ -1,5 +1,5 @@
from .base import * # noqa: F403
from .version import get_current_version
from .utils import get_current_version
DEBUG = True
@ -33,7 +33,7 @@ CACHES = {
# Project settings
VERSION = get_current_version()
ENVIRONMENT = "gitlab"
ENVIRONMENT = "ci"
try:
# Optionally use sentry integration

View file

@ -1,5 +1,5 @@
from .base import * # noqa: F403
from .version import get_current_version
from .utils import get_current_version
SECRET_KEY = "mv4&5#+)-=abz3^&1r^nk_ca6y54--p(4n4cg%z*g&rb64j%wl"
@ -10,6 +10,11 @@ EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
INSTALLED_APPS += ["debug_toolbar", "django_extensions"] # noqa: F405
TEMPLATES[0]["OPTIONS"]["context_processors"].append( # noqa: F405
"django.template.context_processors.debug",
)
# Project settings
VERSION = get_current_version()

View file

@ -1,8 +1,8 @@
from .base import * # noqa: F403
from .version import get_current_version
from .utils import get_current_version
ALLOWED_HOSTS = ["django", "127.0.0.1"]
DEBUG = True
INSTALLED_APPS += ["debug_toolbar", "django_extensions"] # noqa: F405
@ -16,7 +16,10 @@ LOGGING["loggers"].update( # noqa: F405
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
DEBUG = True
TEMPLATES[0]["OPTIONS"]["context_processors"].append( # noqa: F405
"django.template.context_processors.debug",
)
# Project settings
VERSION = get_current_version()

View file

@ -1,64 +1,24 @@
import os
from .version import get_current_version
from newsreader.conf.utils import get_env
from .base import * # noqa: F403
from .utils import get_current_version
DEBUG = False
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
ALLOWED_HOSTS = ["127.0.0.1", "localhost", "rss.fudiggity.nl", "django"]
ADMINS = [
("", email)
for email in os.getenv("ADMINS", "").split(",")
if os.environ.get("ADMINS")
("", email) for email in get_env("ADMINS", split=",", required=False, default=[])
]
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(DJANGO_PROJECT_DIR, "templates")], # noqa: F405
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
]
},
}
]
# Email
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
DEFAULT_FROM_EMAIL = os.environ.get("EMAIL_DEFAULT_FROM", "webmaster@localhost")
EMAIL_HOST = os.environ.get("EMAIL_HOST", "localhost")
EMAIL_PORT = os.environ.get("EMAIL_PORT", 25)
EMAIL_HOST_USER = os.environ.get("EMAIL_HOST_USER", "")
EMAIL_HOST_PASSWORD = os.environ.get("EMAIL_HOST_PASSWORD", "")
EMAIL_USE_TLS = bool(os.environ.get("EMAIL_USE_TLS"))
EMAIL_USE_SSL = bool(os.environ.get("EMAIL_USE_SSL"))
# Project settings
VERSION = get_current_version(debug=False)
ENVIRONMENT = "production"
# Reddit integration
REDDIT_CLIENT_ID = os.environ.get("REDDIT_CLIENT_ID", "")
REDDIT_CLIENT_SECRET = os.environ.get("REDDIT_CLIENT_SECRET", "")
REDDIT_REDIRECT_URL = os.environ.get("REDDIT_CALLBACK_URL", "")
# Third party settings
AXES_HANDLER = "axes.handlers.database.AxesDatabaseHandler"
REGISTRATION_OPEN = False
# Optionally use sentry integration
try:
from sentry_sdk import init as sentry_init

View file

@ -0,0 +1,85 @@
import logging
import os
import subprocess
from pathlib import Path
from typing import Any, Iterable, Type
logger = logging.getLogger(__name__)
def get_env(
name: str,
cast: Type = str,
required: bool = True,
default: Any = None,
split: str = "",
) -> Any:
if cast is not str and split:
raise TypeError(f"Split is not possible with {cast}")
value = os.getenv(name)
if not value:
if required:
logger.warning(f"Missing environment variable: {name}")
return default
bool_mapping = {"yes": True, "true": True, "false": False, "no": False}
if cast is bool:
_value = bool_mapping.get(value.lower())
if _value is None:
raise ValueError(f"Unknown boolean value: {value}")
return _value
value = value if not cast else cast(value)
return value if not split else value.split(split)
def get_current_version(debug: bool = True) -> str:
version = get_env("VERSION", required=False)
if version:
return version
if debug:
try:
output = subprocess.check_output(
["git", "show", "--no-patch", "--format=%H"], universal_newlines=True
)
return output.strip()
except (subprocess.CalledProcessError, OSError):
return ""
try:
output = subprocess.check_output(
["git", "describe", "--tags"], universal_newlines=True
)
return output.strip()
except (subprocess.CalledProcessError, OSError):
return ""
ROOT_MARKERS = ("pyproject.toml", "package.json", "README.md", "CHANGELOG.md")
def get_root_dir() -> Path:
file = Path(__file__)
return _traverse_dirs(file.parent, ROOT_MARKERS)
def _traverse_dirs(path: Path, root_markers: Iterable[str]) -> Path:
if path.parent == path:
raise OSError("Root directory detected")
files = {file.name for file in path.iterdir()}
if not any(marker in files for marker in root_markers):
return _traverse_dirs(path.parent, root_markers)
return path
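A short usage sketch of the new helpers; the environment values are made up for illustration.

import os

from newsreader.conf.utils import get_env, get_root_dir

os.environ["ALLOWED_HOSTS"] = "127.0.0.1,localhost"
os.environ["EMAIL_PORT"] = "587"
os.environ["EMAIL_USE_TLS"] = "yes"

get_env("ALLOWED_HOSTS", split=",", default=[])     # ["127.0.0.1", "localhost"]
get_env("EMAIL_PORT", cast=int, default=25)         # 587
get_env("EMAIL_USE_TLS", cast=bool, default=False)  # True ("yes"/"true"/"no"/"false" accepted)
get_env("MISSING", required=False, default="x")     # "x", without logging a warning

get_root_dir()  # walks up from conf/utils.py until a marker such as pyproject.toml is found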

View file

@ -1,24 +0,0 @@
import os
import subprocess
def get_current_version(debug=True):
if "VERSION" in os.environ:
return os.environ["VERSION"]
if debug:
try:
output = subprocess.check_output(
["git", "show", "--no-patch", "--format=%H"], universal_newlines=True
)
return output.strip()
except (subprocess.CalledProcessError, OSError):
return ""
try:
output = subprocess.check_output(
["git", "describe", "--tags"], universal_newlines=True
)
return output.strip()
except (subprocess.CalledProcessError, OSError):
return ""

View file

@ -3,15 +3,13 @@ import React from 'react';
class Messages extends React.Component {
state = { messages: this.props.messages };
close = ::this.close;
close(index) {
close = index => {
const newMessages = this.state.messages.filter((message, currentIndex) => {
return currentIndex != index;
});
this.setState({ messages: newMessages });
}
};
render() {
const messages = this.state.messages.map((message, index) => {

View file

@ -1,6 +1,4 @@
class Selector {
onClick = ::this.onClick;
inputs = [];
constructor() {
@ -11,13 +9,13 @@ class Selector {
selectAllInput.onchange = this.onClick;
}
onClick(e) {
onClick = e => {
const targetValue = e.target.checked;
this.inputs.forEach(input => {
input.checked = targetValue;
});
}
};
}
export default Selector;

View file

@ -9,10 +9,6 @@ import Messages from '../../components/Messages.js';
import Sidebar from '../../components/Sidebar.js';
class App extends React.Component {
selectCategory = ::this.selectCategory;
deselectCategory = ::this.deselectCategory;
deleteCategory = ::this.deleteCategory;
constructor(props) {
super(props);
@ -24,15 +20,15 @@ class App extends React.Component {
};
}
selectCategory(categoryId) {
selectCategory = categoryId => {
this.setState({ selectedCategoryId: categoryId });
}
};
deselectCategory() {
deselectCategory = () => {
this.setState({ selectedCategoryId: null });
}
};
deleteCategory(categoryId) {
deleteCategory = categoryId => {
const url = `/api/categories/${categoryId}/`;
const options = {
method: 'DELETE',
@ -60,7 +56,7 @@ class App extends React.Component {
text: 'Unable to remove category, try again later',
};
return this.setState({ selectedCategoryId: null, message: message });
}
};
render() {
const { categories } = this.state;

View file

@ -33,7 +33,6 @@ class App extends React.Component {
<HomepageSidebar navLinks={this.props.navLinks} />
<PostList
feedUrl={this.props.feedUrl}
subredditUrl={this.props.subredditUrl}
timezone={this.props.timezone}
forwardedRef={this.postListRef}
postsByType={this.props.postsByType}
@ -46,7 +45,6 @@ class App extends React.Component {
category={this.props.category}
selectedType={this.props.selectedType}
feedUrl={this.props.feedUrl}
subredditUrl={this.props.subredditUrl}
categoriesUrl={this.props.categoriesUrl}
timezone={this.props.timezone}
autoMarking={this.props.autoMarking}

View file

@ -124,10 +124,10 @@ export const fetchPostsBySection = (section, next = false) => {
switch (section.type) {
case RULE_TYPE:
url = next ? next : `/api/rules/${section.id}/posts/?read=false`;
url = next ? next : `/api/rules/${section.id}/posts/`;
break;
case CATEGORY_TYPE:
url = next ? next : `/api/categories/${section.id}/posts/?read=false`;
url = next ? next : `/api/categories/${section.id}/posts/`;
break;
}

View file

@ -3,11 +3,10 @@ import { connect } from 'react-redux';
import Cookies from 'js-cookie';
import { unSelectPost, markPostRead, toggleSaved } from '../actions/posts.js';
import { SAVED_TYPE, SUBREDDIT } from '../constants.js';
import { SAVED_TYPE } from '../constants.js';
import { formatDatetime } from '../../../utils.js';
class PostModal extends React.Component {
modalListener = ::this.modalListener;
readTimer = null;
componentDidMount() {
@ -32,13 +31,13 @@ class PostModal extends React.Component {
window.removeEventListener('click', this.modalListener);
}
modalListener(e) {
modalListener = e => {
const targetClassName = e.target.className;
if (this.props.post && targetClassName == 'modal post-modal') {
this.props.unSelectPost();
}
}
};
render() {
const post = this.props.post;
@ -54,9 +53,6 @@ class PostModal extends React.Component {
let ruleUrl = '';
switch (this.props.rule.type) {
case SUBREDDIT:
ruleUrl = `${this.props.subredditUrl}/${this.props.rule.id}/`;
break;
default:
ruleUrl = `${this.props.feedUrl}/${this.props.rule.id}/`;
break;

View file

@ -1,8 +1,6 @@
import React from 'react';
export default class ScrollTop extends React.Component {
scrollListener = ::this.scrollListener;
state = {
listenerAttached: false,
showTop: false,
@ -17,7 +15,7 @@ export default class ScrollTop extends React.Component {
}
}
scrollListener() {
scrollListener = () => {
const postList = this.props.postListNode;
const elementEnd =
postList.scrollTop + postList.offsetHeight >= postList.scrollHeight;
@ -26,7 +24,7 @@ export default class ScrollTop extends React.Component {
showTop: postList.scrollTop > window.innerHeight,
showBottom: !elementEnd,
});
}
};
render() {
const postList = this.props.postListNode;

View file

@ -2,7 +2,7 @@ import React from 'react';
import { connect } from 'react-redux';
import Cookies from 'js-cookie';
import { CATEGORY_TYPE, SAVED_TYPE, SUBREDDIT } from '../../constants.js';
import { CATEGORY_TYPE, SAVED_TYPE } from '../../constants.js';
import { selectPost, toggleSaved } from '../../actions/posts.js';
import { formatDatetime } from '../../../../utils.js';
@ -18,12 +18,7 @@ class PostItem extends React.Component {
: 'posts__header';
const savedIconClass = post.saved ? 'saved-icon saved-icon--saved' : 'saved-icon';
let ruleUrl = '';
if (rule.type === SUBREDDIT) {
ruleUrl = `${this.props.subredditUrl}/${rule.id}/`;
} else {
ruleUrl = `${this.props.feedUrl}/${rule.id}/`;
}
const ruleUrl = `${this.props.feedUrl}/${rule.id}/`;
return (
<li className="posts__item" ref={this.props.forwardedRef}>

View file

@ -4,13 +4,11 @@ import { isEqual } from 'lodash';
import { fetchPostsBySection, fetchSavedPosts } from '../../actions/posts.js';
import { SAVED_TYPE } from '../../constants.js';
import { filterPosts } from './filters.js';
import LoadingIndicator from '../../../../components/LoadingIndicator.js';
import PostItem from './PostItem.js';
class PostList extends React.Component {
handleIntersect = ::this.handleIntersect;
lastPostRef = null;
observer = null;
@ -33,7 +31,7 @@ class PostList extends React.Component {
this.observer.disconnect();
}
handleIntersect(entries) {
handleIntersect = entries => {
entries.every(entry => {
if (entry.isIntersecting) {
this.observer.unobserve(entry.target);
@ -45,7 +43,7 @@ class PostList extends React.Component {
return false;
}
});
}
};
paginate() {
if (this.props.selected.type === SAVED_TYPE) {
@ -64,7 +62,6 @@ class PostList extends React.Component {
post: item,
selected: this.props.selected,
feedUrl: this.props.feedUrl,
subredditUrl: this.props.subredditUrl,
};
if (isLastItem?.id === item.id) {

View file

@ -5,15 +5,13 @@ import Cookies from 'js-cookie';
import { markRead } from '../../actions/selected.js';
class ReadButton extends React.Component {
markSelectedRead = ::this.markSelectedRead;
markSelectedRead() {
markSelectedRead = () => {
const token = Cookies.get('csrftoken');
if (this.props.selected.unread > 0) {
this.props.markRead({ ...this.props.selected }, token);
}
}
};
render() {
return (

View file

@ -2,5 +2,4 @@ export const RULE_TYPE = 'RULE';
export const CATEGORY_TYPE = 'CATEGORY';
export const SAVED_TYPE = 'SAVED';
export const SUBREDDIT = 'subreddit';
export const FEED = 'feed';

View file

@ -12,7 +12,7 @@ if (page) {
const store = configureStore();
const settings = JSON.parse(document.getElementById('homepageSettings').textContent);
const { feedUrl, subredditUrl, categoriesUrl } = settings;
const { feedUrl, categoriesUrl } = settings;
const navLinks = JSON.parse(document.getElementById('Links').textContent);
@ -20,7 +20,6 @@ if (page) {
<Provider store={store}>
<App
feedUrl={feedUrl.substring(1, feedUrl.length - 3)}
subredditUrl={subredditUrl.substring(1, subredditUrl.length - 3)}
categoriesUrl={categoriesUrl.substring(1, categoriesUrl.length - 3)}
timezone={settings.timezone}
autoMarking={settings.autoMarking}

View file

@ -304,10 +304,10 @@ describe('post actions', () => {
type: constants.RULE_TYPE,
};
fetchMock.getOnce('/api/rules/4/posts/?read=false', {
fetchMock.getOnce('/api/rules/4/posts/', {
body: {
count: 2,
next: 'https://durp.com/api/rules/4/posts/?cursor=jabadabar&read=false',
next: 'https://durp.com/api/rules/4/posts/?cursor=jabadabar',
previous: null,
results: posts,
},
@ -325,7 +325,7 @@ describe('post actions', () => {
{ type: actions.REQUEST_POSTS },
{
type: actions.RECEIVE_POSTS,
next: 'https://durp.com/api/rules/4/posts/?cursor=jabadabar&read=false',
next: 'https://durp.com/api/rules/4/posts/?cursor=jabadabar',
posts,
},
];
@ -373,10 +373,10 @@ describe('post actions', () => {
type: constants.CATEGORY_TYPE,
};
fetchMock.getOnce('/api/categories/1/posts/?read=false', {
fetchMock.getOnce('/api/categories/1/posts/', {
body: {
count: 2,
next: 'https://durp.com/api/categories/4/posts/?cursor=jabadabar&read=false',
next: 'https://durp.com/api/categories/4/posts/?cursor=jabadabar',
previous: null,
results: posts,
},
@ -394,7 +394,7 @@ describe('post actions', () => {
{ type: actions.REQUEST_POSTS },
{
type: actions.RECEIVE_POSTS,
next: 'https://durp.com/api/categories/4/posts/?cursor=jabadabar&read=false',
next: 'https://durp.com/api/categories/4/posts/?cursor=jabadabar',
posts,
},
];
@ -600,7 +600,7 @@ describe('post actions', () => {
const errorMessage = 'Page not found';
fetchMock.getOnce(`/api/rules/${rule.id}/posts/?read=false`, () => {
fetchMock.getOnce(`/api/rules/${rule.id}/posts/`, () => {
throw new Error(errorMessage);
});

View file

@ -81,7 +81,7 @@ describe('post actions', () => {
const action = {
type: actions.RECEIVE_POSTS,
next: 'https://durp.com/api/rules/4/posts/?page=2&read=false',
next: 'https://durp.com/api/rules/4/posts/?page=2',
posts,
};

View file

@ -254,13 +254,13 @@ describe('selected reducer', () => {
const action = {
type: postActions.RECEIVE_POSTS,
next: 'https://durp.com/api/rules/4/posts/?page=2&read=false',
next: 'https://durp.com/api/rules/4/posts/?page=2',
posts,
};
const expectedState = {
...defaultState,
next: 'https://durp.com/api/rules/4/posts/?page=2&read=false',
next: 'https://durp.com/api/rules/4/posts/?page=2',
lastReached: false,
};

View file

@ -4,11 +4,3 @@ from django.utils.translation import gettext as _
class RuleTypeChoices(TextChoices):
feed = "feed", _("Feed")
subreddit = "subreddit", _("Subreddit")
twitter_timeline = "twitter_timeline", _("Twitter timeline")
class TwitterPostTypeChoices(TextChoices):
photo = "photo", _("Photo")
video = "video", _("Video")
animated_gif = "animated_gif", _("GIF")

View file

@ -1,3 +1,5 @@
from django.db.models import Prefetch
from rest_framework import status
from rest_framework.generics import (
GenericAPIView,
@ -10,7 +12,6 @@ from rest_framework.response import Response
from newsreader.core.pagination import CursorPagination
from newsreader.news.collection.models import CollectionRule
from newsreader.news.collection.serializers import RuleSerializer
from newsreader.news.core.filters import ReadFilter
from newsreader.news.core.models import Post
from newsreader.news.core.serializers import PostSerializer
@ -24,7 +25,6 @@ class NestedRuleView(ListAPIView):
queryset = CollectionRule.objects.prefetch_related("posts").all()
serializer_class = PostSerializer
pagination_class = CursorPagination
filter_backends = [ReadFilter]
def get_queryset(self):
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
@ -33,7 +33,9 @@ class NestedRuleView(ListAPIView):
# filtered on the user.
filter_kwargs = {self.lookup_field: self.kwargs[lookup_url_kwarg]}
rule = get_object_or_404(self.queryset, **filter_kwargs)
prefetch = Prefetch("posts", queryset=Post.objects.filter(read=False))
queryset = CollectionRule.objects.prefetch_related(prefetch)
rule = get_object_or_404(queryset, **filter_kwargs)
self.check_object_permissions(self.request, rule)
return rule.posts.order_by("-publication_date")
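Read together, this hunk drops the ReadFilter backend and instead prefetches unread posts when the rule is looked up. The resulting get_queryset reads roughly as follows (a reconstruction from the fragments shown above, not a verbatim copy of the file):

def get_queryset(self):
    lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
    # The queryset is not filtered on the user; object permissions handle that below.
    filter_kwargs = {self.lookup_field: self.kwargs[lookup_url_kwarg]}

    # Replacement for the removed ReadFilter: prefetch only the unread posts of this rule.
    prefetch = Prefetch("posts", queryset=Post.objects.filter(read=False))
    queryset = CollectionRule.objects.prefetch_related(prefetch)
    rule = get_object_or_404(queryset, **filter_kwargs)

    self.check_object_permissions(self.request, rule)
    return rule.posts.order_by("-publication_date")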

View file

@ -1,10 +1,9 @@
from newsreader.news.collection.forms.feed import FeedForm, OPMLImportForm
from newsreader.news.collection.forms.reddit import SubRedditForm
from newsreader.news.collection.forms.rules import CollectionRuleBulkForm
__all__ = [
"FeedForm",
"OPMLImportForm",
"SubRedditForm",
"CollectionRuleBulkForm",
]

View file

@ -1,57 +0,0 @@
from django import forms
from django.core.exceptions import ValidationError
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.forms.base import CollectionRuleForm
from newsreader.news.collection.models import CollectionRule
from newsreader.news.collection.reddit import REDDIT_API_URL
def get_reddit_help_text():
return mark_safe(
"Only subreddits are supported"
" see the 'listings' section in <a className='link' target='_blank' rel='noopener noreferrer'"
" href='https://www.reddit.com/dev/api#section_listings'>the reddit API docs</a>."
" For example: <a className='link' target='_blank' rel='noopener noreferrer'"
" href='https://oauth.reddit.com/r/aww'>https://oauth.reddit.com/r/aww</a>"
)
class SubRedditForm(CollectionRuleForm):
url = forms.URLField(max_length=1024, help_text=get_reddit_help_text)
def clean_url(self):
url = self.cleaned_data["url"]
if not url.startswith(REDDIT_API_URL):
raise ValidationError(_("This does not look like a Reddit API URL"))
return url
def save(self, commit=True):
instance = super().save(commit=False)
instance.type = RuleTypeChoices.subreddit
if commit:
instance.save()
self.save_m2m()
return instance
class Meta:
model = CollectionRule
fields = (
"name",
"url",
"favicon",
"category",
"reddit_allow_nfsw",
"reddit_allow_spoiler",
"reddit_allow_viewed",
"reddit_upvotes_min",
"reddit_downvotes_max",
"reddit_comments_min",
)

View file

@ -0,0 +1,47 @@
# Generated by Django 4.2.16 on 2025-03-26 08:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("collection", "0017_remove_collectionrule_timezone"),
]
operations = [
migrations.RemoveField(
model_name="collectionrule",
name="reddit_allow_nfsw",
),
migrations.RemoveField(
model_name="collectionrule",
name="reddit_allow_spoiler",
),
migrations.RemoveField(
model_name="collectionrule",
name="reddit_allow_viewed",
),
migrations.RemoveField(
model_name="collectionrule",
name="reddit_comments_min",
),
migrations.RemoveField(
model_name="collectionrule",
name="reddit_downvotes_max",
),
migrations.RemoveField(
model_name="collectionrule",
name="reddit_upvotes_min",
),
migrations.RemoveField(
model_name="collectionrule",
name="screen_name",
),
migrations.AlterField(
model_name="collectionrule",
name="type",
field=models.CharField(
choices=[("feed", "Feed")], default="feed", max_length=20
),
),
]

View file

@ -1,5 +1,4 @@
from django.db import models
from django.conf import settings
from django.urls import reverse
from django.utils.translation import gettext as _
@ -49,25 +48,6 @@ class CollectionRule(TimeStampedModel):
on_delete=models.CASCADE,
)
# Reddit
reddit_allow_nfsw = models.BooleanField(_("Allow NSFW posts"), default=False)
reddit_allow_spoiler = models.BooleanField(_("Allow spoilers"), default=False)
reddit_allow_viewed = models.BooleanField(
_("Allow already seen posts"), default=True
)
reddit_upvotes_min = models.PositiveIntegerField(
_("Minimum amount of upvotes"), default=0
)
reddit_downvotes_max = models.PositiveIntegerField(
_("Maximum amount of downvotes"), blank=True, null=True
)
reddit_comments_min = models.PositiveIntegerField(
_("Minimum amount of comments"), default=0
)
# Twitter (legacy)
screen_name = models.CharField(max_length=255, blank=True, null=True)
objects = CollectionRuleQuerySet.as_manager()
def __str__(self):
@ -75,22 +55,10 @@ class CollectionRule(TimeStampedModel):
@property
def update_url(self):
if self.type == RuleTypeChoices.subreddit:
return reverse("news:collection:subreddit-update", kwargs={"pk": self.pk})
elif self.type == RuleTypeChoices.twitter_timeline:
return "#not-supported"
return reverse("news:collection:feed-update", kwargs={"pk": self.pk})
@property
def source_url(self):
if self.type == RuleTypeChoices.subreddit:
from newsreader.news.collection.reddit import REDDIT_API_URL, REDDIT_URL
return self.url.replace(REDDIT_API_URL, REDDIT_URL)
elif self.type == RuleTypeChoices.twitter_timeline:
return f"{settings.TWITTER_URL}/{self.screen_name}"
return self.url
@property

View file

@ -1,419 +0,0 @@
import logging
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime, timedelta, timezone
from html import unescape
from json.decoder import JSONDecodeError
from urllib.parse import urlencode
from uuid import uuid4
from django.conf import settings
from django.core.cache import cache
from django.utils.html import format_html
import requests
from newsreader.news.collection.base import (
PostBuilder,
PostClient,
PostCollector,
PostStream,
Scheduler,
)
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.exceptions import (
BuilderDuplicateException,
BuilderException,
BuilderMissingDataException,
BuilderParseException,
BuilderSkippedException,
StreamDeniedException,
StreamException,
StreamParseException,
StreamTooManyException,
)
from newsreader.news.collection.models import CollectionRule
from newsreader.news.collection.tasks import RedditTokenTask
from newsreader.news.collection.utils import fetch, post, truncate_text
from newsreader.news.core.models import Post
logger = logging.getLogger(__name__)
REDDIT_URL = "https://www.reddit.com"
REDDIT_API_URL = "https://oauth.reddit.com"
RATE_LIMIT = 60
RATE_LIMIT_DURATION = timedelta(seconds=60)
REDDIT_IMAGE_EXTENSIONS = (".jpg", ".png", ".gif")
REDDIT_VIDEO_EXTENSIONS = (".mp4", ".gifv", ".webm")
# see type prefixes on https://www.reddit.com/dev/api/
REDDIT_POST = "t3"
def get_reddit_authorization_url(user):
state = str(uuid4())
cache.set(f"{user.email}-reddit-auth", state)
params = {
"client_id": settings.REDDIT_CLIENT_ID,
"redirect_uri": settings.REDDIT_REDIRECT_URL,
"state": state,
"response_type": "code",
"duration": "permanent",
"scope": "identity,mysubreddits,save,read",
}
authorization_url = f"{REDDIT_URL}/api/v1/authorize"
return f"{authorization_url}?{urlencode(params)}"
def get_reddit_access_token(code, user):
client_auth = requests.auth.HTTPBasicAuth(
settings.REDDIT_CLIENT_ID, settings.REDDIT_CLIENT_SECRET
)
response = post(
f"{REDDIT_URL}/api/v1/access_token",
data={
"redirect_uri": settings.REDDIT_REDIRECT_URL,
"grant_type": "authorization_code",
"code": code,
},
auth=client_auth,
)
response_data = response.json()
user.reddit_access_token = response_data["access_token"]
user.reddit_refresh_token = response_data["refresh_token"]
user.save()
cache.delete(f"{user.email}-reddit-auth")
return response_data["access_token"], response_data["refresh_token"]
# Note that the API always returns a 204 when the basic auth headers are correct
def revoke_reddit_token(user):
client_auth = requests.auth.HTTPBasicAuth(
settings.REDDIT_CLIENT_ID, settings.REDDIT_CLIENT_SECRET
)
response = post(
f"{REDDIT_URL}/api/v1/revoke_token",
data={"token": user.reddit_refresh_token, "token_type_hint": "refresh_token"},
auth=client_auth,
)
return response.status_code == 204
class RedditBuilder(PostBuilder):
rule_type = RuleTypeChoices.subreddit
def build(self):
results = {}
if "data" not in self.payload or "children" not in self.payload["data"]:
return
entries = self.payload["data"]["children"]
for entry in entries:
try:
post = self.build_post(entry)
except BuilderDuplicateException:
logger.warning("Skipping duplicate post")
continue
except BuilderSkippedException as e:
logger.warning(e.message)
continue
except BuilderException:
logger.exception("Failed building post")
continue
identifier = post.remote_identifier
results[identifier] = post
self.instances = results.values()
def build_post(self, entry):
rule = self.stream.rule
entry_data = entry.get("data", {})
remote_identifier = entry_data.get("id", "")
kind = entry.get("kind")
if remote_identifier in self.existing_posts:
raise BuilderDuplicateException(payload=entry)
elif kind != REDDIT_POST:
raise BuilderParseException(
message=f"Payload is not an reddit post, its of kind {kind}",
payload=entry,
)
elif not entry_data:
raise BuilderMissingDataException(
message=f"Post {remote_identifier} did not contain any data",
payload=entry,
)
try:
title = entry_data["title"]
author = entry_data["author"]
post_url_fragment = entry_data["permalink"]
direct_url = entry_data["url"]
is_text = entry_data["is_self"]
is_video = entry_data["is_video"]
is_nsfw = entry_data["over_18"]
is_spoiler = entry_data["spoiler"]
is_viewed = entry_data["clicked"]
upvotes = entry_data["ups"]
downvotes = entry_data["downs"]
comments = entry_data["num_comments"]
except KeyError as e:
raise BuilderMissingDataException(payload=entry) from e
if not rule.reddit_allow_nfsw and is_nsfw:
raise BuilderSkippedException("Rule does not allow NSFW posts")
elif not rule.reddit_allow_spoiler and is_spoiler:
raise BuilderSkippedException("Rule does not allow spoilers")
elif not rule.reddit_allow_viewed and is_viewed:
raise BuilderSkippedException("Post was already seen by user")
elif not upvotes >= rule.reddit_upvotes_min:
raise BuilderSkippedException(
"Post does not meet minimum amount of upvotes"
)
elif (
rule.reddit_downvotes_max is not None
and downvotes > rule.reddit_downvotes_max
):
raise BuilderSkippedException("Post has more downvotes than allowed")
elif not comments >= rule.reddit_comments_min:
raise BuilderSkippedException("Post does not have enough comments")
title = truncate_text(Post, "title", title)
author = truncate_text(Post, "author", author)
if is_text:
body = self.get_text_post(entry_data)
elif direct_url.endswith(REDDIT_IMAGE_EXTENSIONS):
body = self.get_image_post(title, direct_url)
elif is_video:
body = self.get_native_video_post(entry_data)
elif direct_url.endswith(REDDIT_VIDEO_EXTENSIONS):
body = self.get_video_post(direct_url)
else:
body = self.get_url_post(title, direct_url)
try:
_created_date = datetime.fromtimestamp(entry_data["created_utc"])
created_date = _created_date.replace(tzinfo=timezone.utc)
except (OverflowError, OSError) as e:
raise BuilderParseException(payload=entry) from e
except KeyError as e:
raise BuilderMissingDataException(payload=entry) from e
post_entry = {
"remote_identifier": remote_identifier,
"title": title,
"body": body,
"author": author,
"url": f"{REDDIT_URL}{post_url_fragment}",
"publication_date": created_date,
"rule": rule,
}
return Post(**post_entry)
def get_text_post(self, entry):
try:
uncleaned_body = entry["selftext_html"]
except KeyError as e:
raise BuilderMissingDataException(payload=entry) from e
unescaped_body = unescape(uncleaned_body) if uncleaned_body else ""
return self.sanitize_fragment(unescaped_body) if unescaped_body else ""
def get_image_post(self, title, url):
return format_html(
"<div><img alt='{title}' src='{url}' loading='lazy' /></div>",
url=url,
title=title,
)
def get_native_video_post(self, entry):
try:
video_info = entry["secure_media"]["reddit_video"]
except KeyError as e:
raise BuilderMissingDataException(payload=entry) from e
return format_html(
"<div><video controls muted><source src='{url}' type='video/mp4' /></video></div>",
url=video_info["fallback_url"],
)
def get_video_post(self, url):
extension = next(
extension.replace(".", "")
for extension in REDDIT_VIDEO_EXTENSIONS
if url.endswith(extension)
)
if extension == "gifv":
return format_html(
"<div><video controls muted><source src='{url}' type='video/mp4' /></video></div>",
url=url.replace(extension, "mp4"),
)
return format_html(
"<div><video controls muted><source src='{url}' type='video/{extension}' /></video></div>",
url=url,
extension=extension,
)
def get_url_post(self, title, url):
return format_html(
"<div><a target='_blank' rel='noopener noreferrer' alt='{title}' href='{url}' class='link'>Direct url</a></div>",
url=url,
title=title,
)
def __str__(self):
return f"{self.stream.rule.pk}: RedditBuilder"
class RedditStream(PostStream):
rule_type = RuleTypeChoices.subreddit
headers = {}
def __init__(self, rule):
super().__init__(rule)
self.headers = {"Authorization": f"bearer {self.rule.user.reddit_access_token}"}
def read(self):
response = fetch(self.rule.url, headers=self.headers)
return self.parse(response), self
def parse(self, response):
try:
return response.json()
except JSONDecodeError as e:
raise StreamParseException(
response=response, message="Failed parsing json"
) from e
def __str__(self):
return f"{self.rule.pk}: RedditStream"
class RedditClient(PostClient):
stream = RedditStream
def __enter__(self):
streams = [[self.stream(rule) for rule in batch] for batch in self.rules]
rate_limitted = False
with ThreadPoolExecutor(max_workers=10) as executor:
for batch in streams:
futures = {executor.submit(stream.read): stream for stream in batch}
if rate_limitted:
logger.warning("Aborting requests, ratelimit hit")
break
for future in as_completed(futures):
stream = futures[future]
try:
response_data = future.result()
stream.rule.error = None
stream.rule.succeeded = True
yield response_data
except StreamDeniedException as e:
logger.warning(
f"Access token expired for user {stream.rule.user.pk}"
)
stream.rule.user.reddit_access_token = None
stream.rule.user.save()
self.set_rule_error(stream.rule, e)
RedditTokenTask.delay(stream.rule.user.pk)
break
except StreamTooManyException as e:
logger.exception("Ratelimit hit, aborting batched subreddits")
self.set_rule_error(stream.rule, e)
rate_limitted = True
break
except StreamException as e:
logger.exception(
f"Stream failed reading content from {stream.rule.url}"
)
self.set_rule_error(stream.rule, e)
continue
finally:
stream.rule.last_run = datetime.now(tz=timezone.utc)
stream.rule.save()
class RedditCollector(PostCollector):
builder = RedditBuilder
client = RedditClient
class RedditScheduler(Scheduler):
max_amount = RATE_LIMIT
max_user_amount = RATE_LIMIT / 4
def __init__(self, subreddits=[]):
if not subreddits:
self.subreddits = CollectionRule.objects.filter(
type=RuleTypeChoices.subreddit,
user__reddit_access_token__isnull=False,
user__reddit_refresh_token__isnull=False,
enabled=True,
).order_by("last_run")[:200]
else:
self.subreddits = subreddits
def get_scheduled_rules(self):
rule_mapping = {}
current_amount = 0
for subreddit in self.subreddits:
user_pk = subreddit.user.pk
if current_amount == self.max_amount:
break
if user_pk in rule_mapping:
max_amount_reached = len(rule_mapping[user_pk]) == self.max_user_amount
if max_amount_reached:
continue
rule_mapping[user_pk].append(subreddit)
current_amount += 1
continue
rule_mapping[user_pk] = [subreddit]
current_amount += 1
return list(rule_mapping.values())

View file

@ -1,9 +1,4 @@
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail import send_mail
from django.utils.translation import gettext as _
import requests
from celery.exceptions import Reject
from celery.utils.log import get_task_logger
@ -11,9 +6,7 @@ from celery.utils.log import get_task_logger
from newsreader.accounts.models import User
from newsreader.celery import app
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.exceptions.stream import StreamException
from newsreader.news.collection.feed import FeedCollector
from newsreader.news.collection.utils import post
from newsreader.utils.celery import MemCacheLock
@ -49,84 +42,6 @@ class FeedTask(app.Task):
raise Reject(reason="Task already running", requeue=False)
class RedditTask(app.Task):
name = "RedditTask"
ignore_result = True
def run(self):
from newsreader.news.collection.reddit import RedditCollector, RedditScheduler
with MemCacheLock("reddit-task", self.app.oid) as acquired:
if acquired:
logger.info("Running reddit task")
scheduler = RedditScheduler()
subreddits = scheduler.get_scheduled_rules()
collector = RedditCollector()
collector.collect(rules=subreddits)
else:
logger.warning("Cancelling task due to existing lock")
raise Reject(reason="Task already running", requeue=False)
class RedditTokenTask(app.Task):
name = "RedditTokenTask"
ignore_result = True
def run(self, user_pk):
from newsreader.news.collection.reddit import REDDIT_URL
try:
user = User.objects.get(pk=user_pk)
except ObjectDoesNotExist:
message = f"User {user_pk} does not exist"
logger.exception(message)
raise Reject(reason=message, requeue=False)
if not user.reddit_refresh_token:
raise Reject(reason=f"User {user_pk} has no refresh token", requeue=False)
client_auth = requests.auth.HTTPBasicAuth(
settings.REDDIT_CLIENT_ID, settings.REDDIT_CLIENT_SECRET
)
try:
response = post(
f"{REDDIT_URL}/api/v1/access_token",
data={
"grant_type": "refresh_token",
"refresh_token": user.reddit_refresh_token,
},
auth=client_auth,
)
except StreamException:
logger.exception(
f"Failed refreshing reddit access token for user {user_pk}"
)
user.reddit_refresh_token = None
user.save()
message = _(
"Your Reddit account credentials have expired. Re-authenticate in"
" the settings page to keep retrieving Reddit specific information"
" from your account."
)
send_mail(
"Reddit account needs re-authentication", message, None, [user.email]
)
return
response_data = response.json()
user.reddit_access_token = response_data["access_token"]
user.save()
class FaviconTask(app.Task):
name = "FaviconTask"
ignore_result = True
@ -150,15 +65,6 @@ class FaviconTask(app.Task):
collector = FaviconCollector()
collector.collect(rules=rules)
third_party_rules = user.rules.enabled().exclude(
type=RuleTypeChoices.feed
)
for rule in third_party_rules:
if rule.type == RuleTypeChoices.subreddit:
rule.favicon = "https://www.reddit.com/favicon.ico"
rule.save()
else:
logger.warning("Cancelling task due to existing lock")
@ -167,5 +73,3 @@ class FaviconTask(app.Task):
FeedTask = app.register_task(FeedTask())
FaviconTask = app.register_task(FaviconTask())
RedditTask = app.register_task(RedditTask())
RedditTokenTask = app.register_task(RedditTokenTask())

View file

@ -2,139 +2,137 @@
{% load i18n static filters %}
{% block content %}
<main id="rules--page" class="main" data-render-sidebar=true>
<div class="main__container">
<form class="form rules-form">
{% csrf_token %}
<main id="rules--page" class="main" data-render-sidebar=true>
<div class="main__container">
<form class="form rules-form">
{% csrf_token %}
<section class="section form__section form__section--actions">
<div class="form__actions">
<a class="link button button--confirm" href="{% url "news:collection:feed-create" %}">{% trans "Add a feed" %}</a>
<a class="link button button--confirm" href="{% url "news:collection:import" %}">{% trans "Import feeds" %}</a>
<a class="link button button--reddit" href="{% url "news:collection:subreddit-create" %}">{% trans "Add a subreddit" %}</a>
</div>
</section>
<section class="section form__section form__section--actions">
<fieldset class="fieldset form__fieldset">
<input type="submit" class="button button--primary" formaction="{% url "news:collection:rules-enable" %}" formmethod="post" value="{% trans "Enable" %}" />
<input type="submit" class="button button--primary" formaction="{% url "news:collection:rules-disable" %}" formmethod="post" value="{% trans "Disable" %}" />
<input type="submit" class="button button--error" formaction="{% url "news:collection:rules-delete" %}" formmethod="post" value="{% trans "Delete" %}"/>
</fieldset>
</section>
<section class="section form__section">
<table class="table rules-table" border="0" cellspacing="0">
<thead class="table__header rules-table__header">
<tr class="table__row rules-table__row">
<th class="table__heading rules-table__heading--select">
{% include "components/form/checkbox.html" with id="select-all" data_input="rules" id_for_label="select-all" %}
</th>
<th class="table__heading rules-table__heading rules-table__heading--name">{% trans "Name" %}</th>
<th class="table__heading rules-table__heading rules-table__heading--category">{% trans "Category" %}</th>
<th class="table__heading rules-table__heading rules-table__heading--url">{% trans "URL" %}</th>
<th class="table__heading rules-table__heading rules-table__heading--succeeded">{% trans "Successfuly ran" %}</th>
<th class="table__heading rules-table__heading rules-table__heading--enabled">{% trans "Enabled" %}</th>
</tr>
</thead>
<tbody class="table__body rules-table__body">
{% for rule in rules %}
<tr class="table__row {% if rule.failed %}table__row--error{% endif %} rules-table__row">
<td class="table__item rules-table__item--select">
{% with rule|id_for_label:"rules" as id_for_label %}
{% include "components/form/checkbox.html" with name="rules" value=rule.pk id=id_for_label id_for_label=id_for_label %}
{% endwith %}
</td>
<td
class="table__item rules-table__item rules-table__item--name"
title="{{ rule.name }}"
>
<a class="link" href="{{ rule.update_url }}">
{{ rule.name }}
</a>
</td>
<td
class="table__item rules-table__item rules-table__item--category"
title="{{ rule.category.name }}"
>
{% if rule.category %}
<a
class="link"
href="{% url 'news:core:category-update' pk=rule.category.pk %}"
>
{{ rule.category.name }}
</a>
{% endif %}
</td>
<td
class="table__item rules-table__item rules-table__item--url"
title="{{ rule.source_url }}"
>
<a
class="link"
href="{{ rule.source_url }}"
target="_blank"
rel="noopener noreferrer"
>
{{ rule.source_url }}
</a>
</td>
<td class="table__item rules-table__item rules-table__item--failed">
{% if rule.failed %}
<i class="fas fa-exclamation-triangle"></i>
{% else %}
<i class="fas fa-check"></i>
{% endif %}
</td>
<td class="table__item rules-table__item rules-table__item--enabled">
{% if rule.enabled %}
<i class="fas fa-check"></i>
{% else %}
<i class="fas fa-pause"></i>
{% endif %}
</td>
</tr>
{% endfor %}
</tbody>
</table>
</section>
</form>
<div class="table__footer">
<div class="pagination">
<span class="pagination__previous">
{% if page_obj.has_previous %}
<a class="link button" href="?page=1">{% trans "first" %}</a>
<a class="link button" href="?page={{ page_obj.previous_page_number }}">
{% trans "previous" %}
</a>
{% endif %}
</span>
<span class="pagination__current">
{% blocktrans with current_number=page_obj.number total_count=page_obj.paginator.num_pages %}
Page {{ current_number }} of {{ total_count }}
{% endblocktrans %}
</span>
<span class="pagination__next">
{% if page_obj.has_next %}
<a class="link button" href="?page={{ page_obj.next_page_number }}">
{% trans "next" %}
</a>
<a class="link button" href="?page={{ page_obj.paginator.num_pages }}">
{% trans "last" %}
</a>
{% endif %}
</span>
<section class="section form__section form__section--actions">
<div class="form__actions">
<a class="link button button--confirm"
href="{% url 'news:collection:feed-create' %}">{% trans "Add a feed" %}</a>
<a class="link button button--confirm"
href="{% url 'news:collection:import' %}">{% trans "Import feeds" %}</a>
</div>
</section>
<section class="section form__section form__section--actions">
<fieldset class="fieldset form__fieldset">
<input type="submit" class="button button--primary" formaction="{% url 'news:collection:rules-enable' %}"
formmethod="post" value="{% trans " Enable" %}" />
<input type="submit" class="button button--primary" formaction="{% url 'news:collection:rules-disable' %}"
formmethod="post" value="{% trans " Disable" %}" />
<input type="submit" class="button button--error" formaction="{% url 'news:collection:rules-delete' %}"
formmethod="post" value="{% trans " Delete" %}" />
</fieldset>
</section>
<section class="section form__section">
<table class="table rules-table" border="0" cellspacing="0">
<thead class="table__header rules-table__header">
<tr class="table__row rules-table__row">
<th class="table__heading rules-table__heading--select">
{% include "components/form/checkbox.html" with id="select-all" data_input="rules" id_for_label="select-all" %}
</th>
<th class="table__heading rules-table__heading rules-table__heading--name">
{% trans "Name" %}
</th>
<th class="table__heading rules-table__heading rules-table__heading--category">
{% trans "Category" %}
</th>
<th class="table__heading rules-table__heading rules-table__heading--url">
{% trans "URL" %}
</th>
<th class="table__heading rules-table__heading rules-table__heading--succeeded">
{% trans "Successfuly ran" %}
</th>
<th class="table__heading rules-table__heading rules-table__heading--enabled">
{% trans "Enabled" %}
</th>
</tr>
</thead>
<tbody class="table__body rules-table__body">
{% for rule in rules %}
<tr class="table__row {% if rule.failed %}table__row--error{% endif %} rules-table__row">
<td class="table__item rules-table__item--select">
{% with rule|id_for_label:"rules" as id_for_label %}
{% include "components/form/checkbox.html" with name="rules" value=rule.pk id=id_for_label
id_for_label=id_for_label %}
{% endwith %}
</td>
<td class="table__item rules-table__item rules-table__item--name" title="{{ rule.name }}">
<a class="link" href="{{ rule.update_url }}">
{{ rule.name }}
</a>
</td>
<td class="table__item rules-table__item rules-table__item--category" title="{{ rule.category.name }}">
{% if rule.category %}
<a class="link" href="{% url 'news:core:category-update' pk=rule.category.pk %}">
{{ rule.category.name }}
</a>
{% endif %}
</td>
<td class="table__item rules-table__item rules-table__item--url" title="{{ rule.source_url }}">
<a class="link" href="{{ rule.source_url }}" target="_blank" rel="noopener noreferrer">
{{ rule.source_url }}
</a>
</td>
<td class="table__item rules-table__item rules-table__item--failed">
{% if rule.failed %}
<i class="fas fa-exclamation-triangle"></i>
{% else %}
<i class="fas fa-check"></i>
{% endif %}
</td>
<td class="table__item rules-table__item rules-table__item--enabled">
{% if rule.enabled %}
<i class="fas fa-check"></i>
{% else %}
<i class="fas fa-pause"></i>
{% endif %}
</td>
</tr>
{% endfor %}
</tbody>
</table>
</section>
</form>
<div class="table__footer">
<div class="pagination">
<span class="pagination__previous">
{% if page_obj.has_previous %}
<a class="link button" href="?page=1">{% trans "first" %}</a>
<a class="link button" href="?page={{ page_obj.previous_page_number }}">
{% trans "previous" %}
</a>
{% endif %}
</span>
<span class="pagination__current">
{% blocktrans with current_number=page_obj.number total_count=page_obj.paginator.num_pages %}
Page {{ current_number }} of {{ total_count }}
{% endblocktrans %}
</span>
<span class="pagination__next">
{% if page_obj.has_next %}
<a class="link button" href="?page={{ page_obj.next_page_number }}">
{% trans "next" %}
</a>
<a class="link button" href="?page={{ page_obj.paginator.num_pages }}">
{% trans "last" %}
</a>
{% endif %}
</span>
</div>
</div>
</main>
</div>
</main>
{% endblock %}

View file

@ -1,13 +0,0 @@
{% extends "sidebar.html" %}
{% load static %}
{% block content %}
{% url "news:collection:rules" as cancel_url %}
<main id="subreddit--page" class="main" data-render-sidebar=true>
<div class="main__container">
{% include "components/form/form.html" with form=form title="Add a subreddit" cancel_url=cancel_url confirm_text="Add subrredit" %}
</div>
</main>
{% endblock %}

View file

@ -1,14 +0,0 @@
{% extends "base.html" %}
{% load static i18n %}
{% block content %}
<main id="subreddit--page" class="main">
{% if subreddit.error %}
{% trans "Failed to retrieve posts" as title %}
{% include "components/textbox/textbox.html" with title=title body=subreddit.error class="text-section--error" only %}
{% endif %}
{% url "news:collection:rules" as cancel_url %}
{% include "components/form/form.html" with form=form title="Update subreddit" cancel_url=cancel_url confirm_text="Save subreddit" %}
</main>
{% endblock %}

View file

@ -202,7 +202,7 @@ class NestedRuleListViewTestCase(TestCase):
with self.subTest(post=post):
self.assertEqual(post["rule"]["id"], rule.pk)
def test_unread_posts(self):
def test_posts(self):
rule = FeedFactory.create(user=self.user)
FeedPostFactory.create_batch(size=10, rule=rule, read=False)
@ -210,7 +210,6 @@ class NestedRuleListViewTestCase(TestCase):
response = self.client.get(
reverse("api:news:collection:rules-nested-posts", kwargs={"pk": rule.pk}),
{"read": "false"},
)
data = response.json()
@ -221,23 +220,3 @@ class NestedRuleListViewTestCase(TestCase):
for post in data["results"]:
with self.subTest(post=post):
self.assertEqual(post["read"], False)
def test_read_posts(self):
rule = FeedFactory.create(user=self.user)
FeedPostFactory.create_batch(size=20, rule=rule, read=False)
FeedPostFactory.create_batch(size=10, rule=rule, read=True)
response = self.client.get(
reverse("api:news:collection:rules-nested-posts", kwargs={"pk": rule.pk}),
{"read": "true"},
)
data = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(len(data["results"]), 10)
for post in data["results"]:
with self.subTest(post=post):
self.assertEqual(post["read"], True)

View file

@ -3,7 +3,6 @@ import factory
from newsreader.accounts.tests.factories import UserFactory
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.models import CollectionRule
from newsreader.news.collection.reddit import REDDIT_URL
class CollectionRuleFactory(factory.django.DjangoModelFactory):
@ -23,8 +22,3 @@ class CollectionRuleFactory(factory.django.DjangoModelFactory):
class FeedFactory(CollectionRuleFactory):
type = RuleTypeChoices.feed
class SubredditFactory(CollectionRuleFactory):
type = RuleTypeChoices.subreddit
website_url = REDDIT_URL

View file

@ -5,12 +5,12 @@ from django.test import TestCase
from newsreader.news.collection.favicon import FaviconBuilder
from newsreader.news.collection.tests.factories import CollectionRuleFactory
from newsreader.news.collection.tests.favicon.builder.mocks import (
simple_mock,
mock_without_url,
mock_without_header,
mock_with_weird_path,
mock_with_other_url,
mock_with_multiple_icons,
mock_with_other_url,
mock_with_weird_path,
mock_without_header,
mock_without_url,
simple_mock,
)

View file

@ -44,7 +44,7 @@ feed_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Trump's genocidal taunts will not " "end Iran - Zarif",
"value": "Trump's genocidal taunts will not end Iran - Zarif",
},
},
{
@ -83,7 +83,7 @@ feed_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Huawei's Android loss: How it " "affects you",
"value": "Huawei's Android loss: How it affects you",
},
},
{
@ -124,7 +124,7 @@ feed_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Birmingham head teacher threatened " "over LGBT lessons",
"value": "Birmingham head teacher threatened over LGBT lessons",
},
},
],

View file

@ -11,21 +11,21 @@ from newsreader.news.core.models import Post
from newsreader.news.core.tests.factories import FeedPostFactory
from .mocks import (
multiple_mock,
mock_with_html,
mock_with_long_author,
mock_with_long_exotic_title,
mock_with_long_title,
mock_with_longer_content_detail,
mock_with_multiple_content_detail,
mock_with_shorter_content_detail,
mock_with_update_entries,
mock_without_author,
mock_without_body,
mock_without_entries,
mock_without_identifier,
mock_without_publish_date,
mock_without_url,
mock_without_body,
mock_without_author,
mock_without_entries,
mock_with_update_entries,
mock_with_html,
mock_with_long_author,
mock_with_long_title,
mock_with_long_exotic_title,
mock_with_longer_content_detail,
mock_with_shorter_content_detail,
mock_with_multiple_content_detail,
multiple_mock,
)

View file

@ -42,7 +42,7 @@ simple_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Trump's 'genocidal taunts' will not " "end Iran - Zarif",
"value": "Trump's 'genocidal taunts' will not end Iran - Zarif",
},
}
],

View file

@ -42,7 +42,7 @@ multiple_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Trump's 'genocidal taunts' will not " "end Iran - Zarif",
"value": "Trump's 'genocidal taunts' will not end Iran - Zarif",
},
},
{
@ -81,7 +81,7 @@ multiple_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Huawei's Android loss: How it " "affects you",
"value": "Huawei's Android loss: How it affects you",
},
},
{
@ -122,7 +122,7 @@ multiple_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Birmingham head teacher threatened " "over LGBT lessons",
"value": "Birmingham head teacher threatened over LGBT lessons",
},
},
],
@ -212,7 +212,7 @@ duplicate_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Trump's 'genocidal taunts' will not " "end Iran - Zarif",
"value": "Trump's 'genocidal taunts' will not end Iran - Zarif",
},
},
{
@ -250,7 +250,7 @@ duplicate_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Huawei's Android loss: How it " "affects you",
"value": "Huawei's Android loss: How it affects you",
},
},
{
@ -290,7 +290,7 @@ duplicate_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Birmingham head teacher threatened " "over LGBT lessons",
"value": "Birmingham head teacher threatened over LGBT lessons",
},
},
],
@ -356,7 +356,7 @@ multiple_update_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Trump's 'genocidal taunts' will not " "end Iran - Zarif",
"value": "Trump's 'genocidal taunts' will not end Iran - Zarif",
},
},
{
@ -395,7 +395,7 @@ multiple_update_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Huawei's Android loss: How it " "affects you",
"value": "Huawei's Android loss: How it affects you",
},
},
{
@ -436,7 +436,7 @@ multiple_update_mock = {
"base": "http://feeds.bbci.co.uk/news/rss.xml",
"language": None,
"type": "text/plain",
"value": "Birmingham head teacher threatened " "over LGBT lessons",
"value": "Birmingham head teacher threatened over LGBT lessons",
},
},
],

View file

@ -73,12 +73,12 @@ simple_mock_parsed = {
"not think face coverings should be "
"mandatory in shops in England.",
},
"title": "Coronavirus: I trust people's sense on face masks - " "Gove",
"title": "Coronavirus: I trust people's sense on face masks - Gove",
"title_detail": {
"base": "",
"language": None,
"type": "text/plain",
"value": "Coronavirus: I trust people's sense " "on face masks - Gove",
"value": "Coronavirus: I trust people's sense on face masks - Gove",
},
},
{
@ -109,7 +109,7 @@ simple_mock_parsed = {
"base": "",
"language": None,
"type": "text/plain",
"value": "Farm outbreak leads 200 to self " "isolate",
"value": "Farm outbreak leads 200 to self isolate",
},
},
{
@ -137,12 +137,12 @@ simple_mock_parsed = {
"talks on tackling people "
"smuggling.",
},
"title": "English Channel search operation after migrant " "crossings",
"title": "English Channel search operation after migrant crossings",
"title_detail": {
"base": "",
"language": None,
"type": "text/plain",
"value": "English Channel search operation " "after migrant crossings",
"value": "English Channel search operation after migrant crossings",
},
},
],

File diff suppressed because it is too large

View file

@ -1,472 +0,0 @@
from datetime import datetime, timezone
from unittest.mock import Mock
from django.test import TestCase
from newsreader.news.collection.reddit import RedditBuilder
from newsreader.news.collection.tests.factories import SubredditFactory
from newsreader.news.collection.tests.reddit.builder.mocks import (
simple_mock,
empty_mock,
unknown_mock,
unsanitized_mock,
author_mock,
title_mock,
duplicate_mock,
image_mock,
external_image_mock,
video_mock,
external_video_mock,
external_gifv_mock,
nsfw_mock,
spoiler_mock,
seen_mock,
upvote_mock,
comment_mock,
downvote_mock,
)
from newsreader.news.core.models import Post
from newsreader.news.core.tests.factories import RedditPostFactory
class RedditBuilderTestCase(TestCase):
def setUp(self):
self.maxDiff = None
def test_simple_mock(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(simple_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertCountEqual(
("hm0qct", "hna75r", "hngs71", "hngsj8", "hnd7cy"), posts.keys()
)
post = posts["hm0qct"]
self.assertEqual(post.rule, subreddit)
self.assertEqual(
post.title,
"Linux Experiences/Rants or Education/Certifications thread - July 06, 2020",
)
self.assertIn(
" This megathread is also to hear opinions from anyone just starting out"
" with Linux or those that have used Linux (GNU or otherwise) for a long",
post.body,
)
self.assertIn(
"<p>For those looking for certifications please use this megathread to ask about how"
" to get certified whether it&#39;s for the business world or for your own satisfaction."
' Be sure to check out <a href="/r/linuxadmin">r/linuxadmin</a> for more discussion in the'
" SysAdmin world!</p>",
post.body,
)
self.assertEqual(post.author, "AutoModerator")
self.assertEqual(
post.url,
"https://www.reddit.com/r/linux/comments/hm0qct/linux_experiencesrants_or_educationcertifications/",
)
self.assertEqual(
post.publication_date, datetime(2020, 7, 6, 6, 11, 22, tzinfo=timezone.utc)
)
def test_empty_data(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(empty_mock, mock_stream) as builder:
builder.build()
builder.save()
self.assertEqual(Post.objects.count(), 0)
def test_unknown_mock(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(unknown_mock, mock_stream) as builder:
builder.build()
builder.save()
self.assertEqual(Post.objects.count(), 0)
def test_html_sanitizing(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(unsanitized_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertCountEqual(("hnd7cy",), posts.keys())
post = posts["hnd7cy"]
self.assertEqual(post.body, "<article></article>")
def test_long_author_text_is_truncated(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(author_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertCountEqual(("hnd7cy",), posts.keys())
post = posts["hnd7cy"]
self.assertEqual(post.author, "TheQuantumZeroTheQuantumZeroTheQuantumZ…")
def test_long_title_text_is_truncated(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(title_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertCountEqual(("hnd7cy",), posts.keys())
post = posts["hnd7cy"]
self.assertEqual(
post.title,
'Board statement on the LibreOffice 7.0 RC "Personal EditionBoard statement on the LibreOffice 7.0 RC "Personal Edition" label" labelBoard statement on the LibreOffice 7.0 RC "PersBoard statement on t…',
)
def test_duplicate_in_response(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(duplicate_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertEqual(Post.objects.count(), 2)
self.assertCountEqual(("hm0qct", "hna75r"), posts.keys())
def test_duplicate_in_database(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
RedditPostFactory(remote_identifier="hm0qct", rule=subreddit, title="foo")
with builder(simple_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertEqual(Post.objects.count(), 5)
self.assertCountEqual(
("hm0qct", "hna75r", "hngs71", "hngsj8", "hnd7cy"), posts.keys()
)
def test_image_post(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(image_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertCountEqual(("hr64xh", "hr4bxo", "hr14y5", "hr2fv0"), posts.keys())
post = posts["hr64xh"]
title = (
"Yall, I just cant... this is my "
"son, Judah. My wife and I have no "
"idea how we created such a "
"beautiful child."
)
url = "https://i.redd.it/cm2qybia1va51.jpg"
self.assertEqual(
"https://www.reddit.com/r/aww/comments/hr64xh/yall_i_just_cant_this_is_my_son_judah_my_wife_and/",
post.url,
)
self.assertEqual(
f"<div><img alt='{title}' src='{url}' loading='lazy' /></div>", post.body
)
def test_external_image_post(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(external_image_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertCountEqual(("hr41am", "huoldn"), posts.keys())
post = posts["hr41am"]
url = "http://gfycat.com/thatalivedogwoodclubgall"
title = "Excited cows have a new brush!"
self.assertEqual(
f"<div><a target='_blank' rel='noopener noreferrer' alt='{title}' href='{url}' class='link'>Direct url</a></div>",
post.body,
)
self.assertEqual(
"https://www.reddit.com/r/aww/comments/hr41am/excited_cows_have_a_new_brush/",
post.url,
)
post = posts["huoldn"]
url = "https://i.imgur.com/usfMVUJ.jpg"
title = "Novosibirsk Zoo welcomes 16 cobalt-eyed Pallass cat kittens"
self.assertEqual(
f"<div><img alt='{title}' src='{url}' loading='lazy' /></div>", post.body
)
self.assertEqual(
"https://www.reddit.com/r/aww/comments/huoldn/novosibirsk_zoo_welcomes_16_cobalteyed_pallass/",
post.url,
)
def test_video_post(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(video_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertCountEqual(("hr32jf", "hr1r00", "hqy0ny", "hr0uzh"), posts.keys())
post = posts["hr1r00"]
url = "https://v.redd.it/eyvbxaeqtta51/DASH_480.mp4?source=fallback"
self.assertEqual(
post.url,
"https://www.reddit.com/r/aww/comments/hr1r00/cool_catt_and_his_clingy_girlfriend/",
)
self.assertEqual(
f"<div><video controls muted><source src='{url}' type='video/mp4' /></video></div>",
post.body,
)
def test_external_video_post(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(external_video_mock, mock_stream) as builder:
builder.build()
builder.save()
post = Post.objects.get()
self.assertEqual(post.remote_identifier, "hulh8k")
self.assertEqual(
post.url,
"https://www.reddit.com/r/aww/comments/hulh8k/dog_splashing_in_water/",
)
title = "Dog splashing in water"
url = "https://gfycat.com/excellentinfantileamericanwigeon"
self.assertEqual(
f"<div><a target='_blank' rel='noopener noreferrer' alt='{title}' href='{url}' class='link'>Direct url</a></div>",
post.body,
)
def test_external_gifv_video_post(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(external_gifv_mock, mock_stream) as builder:
builder.build()
builder.save()
post = Post.objects.get()
self.assertEqual(post.remote_identifier, "humdlf")
self.assertEqual(
post.url, "https://www.reddit.com/r/aww/comments/humdlf/if_i_fits_i_sits/"
)
self.assertEqual(
"<div><video controls muted><source src='https://i.imgur.com/grVh2AG.mp4' type='video/mp4' /></video></div>",
post.body,
)
def test_link_only_post(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(simple_mock, mock_stream) as builder:
builder.build()
builder.save()
post = Post.objects.get(remote_identifier="hngsj8")
title = "KeePassXC 2.6.0 released"
url = "https://keepassxc.org/blog/2020-07-07-2.6.0-released/"
self.assertIn(
f"<div><a target='_blank' rel='noopener noreferrer' alt='{title}' href='{url}' class='link'>Direct url</a></div>",
post.body,
)
self.assertEqual(
post.url,
"https://www.reddit.com/r/linux/comments/hngsj8/keepassxc_260_released/",
)
def test_skip_not_known_post_type(self):
builder = RedditBuilder
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
with builder(unknown_mock, mock_stream) as builder:
builder.build()
builder.save()
self.assertEqual(Post.objects.count(), 0)
def test_nsfw_not_allowed(self):
builder = RedditBuilder
subreddit = SubredditFactory(reddit_allow_nfsw=False)
mock_stream = Mock(rule=subreddit)
with builder(nsfw_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertEqual(Post.objects.count(), 1)
self.assertCountEqual(("hna75r",), posts.keys())
def test_spoiler_not_allowed(self):
builder = RedditBuilder
subreddit = SubredditFactory(reddit_allow_spoiler=False)
mock_stream = Mock(rule=subreddit)
with builder(spoiler_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertEqual(Post.objects.count(), 1)
self.assertCountEqual(("hm0qct",), posts.keys())
def test_already_seen_not_allowed(self):
builder = RedditBuilder
subreddit = SubredditFactory(reddit_allow_viewed=False)
mock_stream = Mock(rule=subreddit)
with builder(seen_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertEqual(Post.objects.count(), 1)
self.assertCountEqual(("hna75r",), posts.keys())
def test_upvote_minimum(self):
builder = RedditBuilder
subreddit = SubredditFactory(reddit_upvotes_min=100)
mock_stream = Mock(rule=subreddit)
with builder(upvote_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertEqual(Post.objects.count(), 1)
self.assertCountEqual(("hna75r",), posts.keys())
def test_comments_minimum(self):
builder = RedditBuilder
subreddit = SubredditFactory(reddit_comments_min=100)
mock_stream = Mock(rule=subreddit)
with builder(comment_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertEqual(Post.objects.count(), 1)
self.assertCountEqual(("hm0qct",), posts.keys())
def test_downvote_maximum(self):
builder = RedditBuilder
subreddit = SubredditFactory(reddit_downvotes_max=20)
mock_stream = Mock(rule=subreddit)
with builder(downvote_mock, mock_stream) as builder:
builder.build()
builder.save()
posts = {post.remote_identifier: post for post in Post.objects.all()}
self.assertEqual(Post.objects.count(), 1)
self.assertCountEqual(("hm0qct",), posts.keys())

@ -1,160 +0,0 @@
# Note that some response data is truncated
simple_mock = {
"data": {
"after": "t3_hjywyf",
"before": None,
"children": [
{
"data": {
"approved_at_utc": None,
"approved_by": None,
"archived": False,
"author": "AutoModerator",
"banned_at_utc": None,
"banned_by": None,
"category": None,
"content_categories": None,
"created": 1593605471.0,
"created_utc": 1593576671.0,
"discussion_type": None,
"distinguished": "moderator",
"domain": "self.linux",
"edited": False,
"hidden": False,
"id": "hj34ck",
"locked": False,
"name": "t3_hj34ck",
"permalink": "/r/linux/comments/hj34ck/weekly_questions_and_hardware_thread_july_01_2020/",
"pinned": False,
"selftext": "Welcome to r/linux! If you're "
"new to Linux or trying to get "
"started this thread is for you. "
"Get help here or as always, "
"check out r/linuxquestions or "
"r/linux4noobs\n"
"\n"
"This megathread is for all your "
"question needs. As we don't "
"allow questions on r/linux "
"outside of this megathread, "
"please consider using "
"r/linuxquestions or "
"r/linux4noobs for the best "
"solution to your problem.\n"
"\n"
"Ask your hardware requests here "
"too or try r/linuxhardware!",
"selftext_html": "&lt;!-- SC_OFF "
"--&gt;&lt;div "
'class="md"&gt;&lt;p&gt;Welcome '
"to &lt;a "
'href="/r/linux"&gt;r/linux&lt;/a&gt;! '
"If you&amp;#39;re new to "
"Linux or trying to get "
"started this thread is for "
"you. Get help here or as "
"always, check out &lt;a "
'href="/r/linuxquestions"&gt;r/linuxquestions&lt;/a&gt; '
"or &lt;a "
'href="/r/linux4noobs"&gt;r/linux4noobs&lt;/a&gt;&lt;/p&gt;\n'
"\n"
"&lt;p&gt;This megathread is "
"for all your question "
"needs. As we don&amp;#39;t "
"allow questions on &lt;a "
'href="/r/linux"&gt;r/linux&lt;/a&gt; '
"outside of this megathread, "
"please consider using &lt;a "
'href="/r/linuxquestions"&gt;r/linuxquestions&lt;/a&gt; '
"or &lt;a "
'href="/r/linux4noobs"&gt;r/linux4noobs&lt;/a&gt; '
"for the best solution to "
"your problem.&lt;/p&gt;\n"
"\n"
"&lt;p&gt;Ask your hardware "
"requests here too or try "
"&lt;a "
'href="/r/linuxhardware"&gt;r/linuxhardware&lt;/a&gt;!&lt;/p&gt;\n'
"&lt;/div&gt;&lt;!-- SC_ON "
"--&gt;",
"spoiler": False,
"stickied": True,
"subreddit": "linux",
"subreddit_id": "t5_2qh1a",
"subreddit_name_prefixed": "r/linux",
"title": "Weekly Questions and Hardware " "Thread - July 01, 2020",
"url": "https://www.reddit.com/r/linux/comments/hj34ck/weekly_questions_and_hardware_thread_july_01_2020/",
"visited": False,
},
"kind": "t3",
},
{
"data": {
"archived": False,
"author": "AutoModerator",
"banned_at_utc": None,
"banned_by": None,
"category": None,
"created": 1593824903.0,
"created_utc": 1593796103.0,
"discussion_type": None,
"domain": "self.linux",
"edited": False,
"hidden": False,
"id": "hkmu0t",
"name": "t3_hkmu0t",
"permalink": "/r/linux/comments/hkmu0t/weekend_fluff_linux_in_the_wild_thread_july_03/",
"pinned": False,
"saved": False,
"selftext": "Welcome to the weekend! This "
"stickied thread is for you to "
"post pictures of your ubuntu "
"2006 install disk, slackware "
"floppies, on-topic memes or "
"more.\n"
"\n"
"When it's not the weekend, be "
"sure to check out "
"r/WildLinuxAppears or "
"r/linuxmemes!",
"selftext_html": "&lt;!-- SC_OFF "
"--&gt;&lt;div "
'class="md"&gt;&lt;p&gt;Welcome '
"to the weekend! This "
"stickied thread is for you "
"to post pictures of your "
"ubuntu 2006 install disk, "
"slackware floppies, "
"on-topic memes or "
"more.&lt;/p&gt;\n"
"\n"
"&lt;p&gt;When it&amp;#39;s "
"not the weekend, be sure to "
"check out &lt;a "
'href="/r/WildLinuxAppears"&gt;r/WildLinuxAppears&lt;/a&gt; '
"or &lt;a "
'href="/r/linuxmemes"&gt;r/linuxmemes&lt;/a&gt;!&lt;/p&gt;\n'
"&lt;/div&gt;&lt;!-- SC_ON "
"--&gt;",
"spoiler": False,
"stickied": True,
"subreddit": "linux",
"subreddit_id": "t5_2qh1a",
"subreddit_name_prefixed": "r/linux",
"subreddit_subscribers": 542073,
"subreddit_type": "public",
"thumbnail": "",
"title": "Weekend Fluff / Linux in the Wild "
"Thread - July 03, 2020",
"url": "https://www.reddit.com/r/linux/comments/hkmu0t/weekend_fluff_linux_in_the_wild_thread_july_03/",
"visited": False,
},
"kind": "t3",
},
],
"dist": 27,
"modhash": None,
},
"kind": "Listing",
}
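
For orientation, this mock mirrors the shape of a Reddit listing response: a top-level "Listing" wrapper whose data.children entries each carry a "t3" post under their own data key. A minimal sketch of walking that structure; the iter_posts helper below is illustrative and not part of the repository:

from datetime import datetime, timezone

def iter_posts(listing):
    # Each child looks like {"kind": "t3", "data": {...}}; skip other kinds.
    for child in listing["data"]["children"]:
        if child.get("kind") != "t3":
            continue
        data = child["data"]
        yield {
            "id": data["id"],
            "title": data["title"],
            "url": f"https://www.reddit.com{data['permalink']}",
            "published": datetime.fromtimestamp(data["created_utc"], tz=timezone.utc),
        }

# Example usage with the mock above:
# for post in iter_posts(simple_mock):
#     print(post["id"], post["title"])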

@ -1,163 +0,0 @@
from unittest.mock import Mock, patch
from uuid import uuid4
from django.test import TestCase
from django.utils.lorem_ipsum import words
from newsreader.accounts.tests.factories import UserFactory
from newsreader.news.collection.exceptions import (
StreamDeniedException,
StreamException,
StreamNotFoundException,
StreamParseException,
StreamTimeOutException,
StreamTooManyException,
)
from newsreader.news.collection.reddit import RedditClient
from newsreader.news.collection.tests.factories import SubredditFactory
from .mocks import simple_mock
class RedditClientTestCase(TestCase):
def setUp(self):
self.maxDiff = None
self.patched_read = patch("newsreader.news.collection.reddit.RedditStream.read")
self.mocked_read = self.patched_read.start()
def tearDown(self):
patch.stopall()
def test_client_retrieves_single_rules(self):
subreddit = SubredditFactory()
mock_stream = Mock(rule=subreddit)
self.mocked_read.return_value = (simple_mock, mock_stream)
with RedditClient([[subreddit]]) as client:
for data, stream in client:
with self.subTest(data=data, stream=stream):
self.assertEquals(data, simple_mock)
self.assertEquals(stream, mock_stream)
self.mocked_read.assert_called_once_with()
def test_client_catches_stream_exception(self):
subreddit = SubredditFactory()
self.mocked_read.side_effect = StreamException(message="Stream exception")
with RedditClient([[subreddit]]) as client:
for data, stream in client:
with self.subTest(data=data, stream=stream):
self.assertEquals(data, None)
self.assertEquals(stream, None)
self.assertEquals(stream.rule.error, "Stream exception")
self.assertEquals(stream.rule.succeeded, False)
self.mocked_read.assert_called_once_with()
def test_client_catches_stream_not_found_exception(self):
subreddit = SubredditFactory.create()
self.mocked_read.side_effect = StreamNotFoundException(
message="Stream not found"
)
with RedditClient([[subreddit]]) as client:
for data, stream in client:
with self.subTest(data=data, stream=stream):
self.assertEquals(data, None)
self.assertEquals(stream, None)
self.assertEquals(stream.rule.error, "Stream not found")
self.assertEquals(stream.rule.succeeded, False)
self.mocked_read.assert_called_once_with()
@patch("newsreader.news.collection.reddit.RedditTokenTask")
def test_client_catches_stream_denied_exception(self, mocked_task):
user = UserFactory(
reddit_access_token=str(uuid4()), reddit_refresh_token=str(uuid4())
)
subreddit = SubredditFactory(user=user)
self.mocked_read.side_effect = StreamDeniedException(message="Token expired")
with RedditClient([(subreddit,)]) as client:
results = [(data, stream) for data, stream in client]
self.mocked_read.assert_called_once_with()
mocked_task.delay.assert_called_once_with(user.pk)
self.assertEquals(len(results), 0)
user.refresh_from_db()
subreddit.refresh_from_db()
self.assertEquals(user.reddit_access_token, None)
self.assertEquals(subreddit.succeeded, False)
self.assertEquals(subreddit.error, "Token expired")
def test_client_catches_stream_timed_out_exception(self):
subreddit = SubredditFactory()
self.mocked_read.side_effect = StreamTimeOutException(
message="Stream timed out"
)
with RedditClient([[subreddit]]) as client:
for data, stream in client:
with self.subTest(data=data, stream=stream):
self.assertEquals(data, None)
self.assertEquals(stream, None)
self.assertEquals(stream.rule.error, "Stream timed out")
self.assertEquals(stream.rule.succeeded, False)
self.mocked_read.assert_called_once_with()
def test_client_catches_stream_too_many_exception(self):
subreddit = SubredditFactory()
self.mocked_read.side_effect = StreamTooManyException
with RedditClient([[subreddit]]) as client:
for data, stream in client:
with self.subTest(data=data, stream=stream):
self.assertEquals(data, None)
self.assertEquals(stream, None)
self.assertEquals(stream.rule.error, "Too many requests")
self.assertEquals(stream.rule.succeeded, False)
self.mocked_read.assert_called_once_with()
def test_client_catches_stream_parse_exception(self):
subreddit = SubredditFactory()
self.mocked_read.side_effect = StreamParseException(
message="Stream could not be parsed"
)
with RedditClient([[subreddit]]) as client:
for data, stream in client:
with self.subTest(data=data, stream=stream):
self.assertEquals(data, None)
self.assertEquals(stream, None)
self.assertEquals(stream.rule.error, "Stream could not be parsed")
self.assertEquals(stream.rule.succeeded, False)
self.mocked_read.assert_called_once_with()
def test_client_catches_long_exception_text(self):
subreddit = SubredditFactory()
self.mocked_read.side_effect = StreamParseException(message=words(1000))
with RedditClient([[subreddit]]) as client:
for data, stream in client:
self.assertEquals(data, None)
self.assertEquals(stream, None)
self.assertEquals(len(stream.rule.error), 1024)
self.assertEquals(stream.rule.succeeded, False)
self.mocked_read.assert_called_once_with()

File diff suppressed because it is too large

@ -1,201 +0,0 @@
from datetime import datetime, timezone
from unittest.mock import patch
from uuid import uuid4
from django.test import TestCase
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.exceptions import (
StreamDeniedException,
StreamForbiddenException,
StreamNotFoundException,
StreamTimeOutException,
)
from newsreader.news.collection.reddit import RedditCollector
from newsreader.news.collection.tests.factories import SubredditFactory
from newsreader.news.collection.tests.reddit.collector.mocks import (
empty_mock,
simple_mock_1,
simple_mock_2,
)
from newsreader.news.core.models import Post
class RedditCollectorTestCase(TestCase):
def setUp(self):
self.maxDiff = None
self.patched_get = patch("newsreader.news.collection.reddit.fetch")
self.mocked_fetch = self.patched_get.start()
self.patched_parse = patch(
"newsreader.news.collection.reddit.RedditStream.parse"
)
self.mocked_parse = self.patched_parse.start()
def tearDown(self):
patch.stopall()
def test_simple_batch(self):
self.mocked_parse.side_effect = (simple_mock_1, simple_mock_2)
rules = (
(subreddit,)
for subreddit in SubredditFactory.create_batch(
user__reddit_access_token=str(uuid4()),
user__reddit_refresh_token=str(uuid4()),
enabled=True,
size=2,
)
)
collector = RedditCollector()
collector.collect(rules=rules)
self.assertCountEqual(
Post.objects.values_list("remote_identifier", flat=True),
(
"hm6byg",
"hpkhgj",
"hph00n",
"hp9mlw",
"hpjn8x",
"gdfaip",
"hmd2ez",
"hpr28u",
"hpps6f",
"hp7uqe",
),
)
for subreddit in rules:
with self.subTest(subreddit=subreddit):
self.assertEqual(subreddit.succeeded, True)
self.assertEqual(subreddit.last_run, datetime.now(tz=timezone.utc))
self.assertEqual(subreddit.error, None)
post = Post.objects.get(
remote_identifier="hph00n", rule__type=RuleTypeChoices.subreddit
)
self.assertEqual(
post.publication_date,
datetime(2020, 7, 11, 22, 23, 24, tzinfo=timezone.utc),
)
self.assertEqual(post.author, "HannahB888")
self.assertEqual(post.title, "Drake Interplanetary Smartkey thing that I made!")
self.assertEqual(
post.url,
"https://www.reddit.com/r/starcitizen/comments/hph00n/drake_interplanetary_smartkey_thing_that_i_made/",
)
post = Post.objects.get(
remote_identifier="hpr28u", rule__type=RuleTypeChoices.subreddit
)
self.assertEqual(
post.publication_date,
datetime(2020, 7, 12, 10, 29, 10, tzinfo=timezone.utc),
)
self.assertEqual(post.author, "Sebaron")
self.assertEqual(
post.title,
"I am a medical student, and I recently programmed an open-source eye-tracker for brain research",
)
self.assertEqual(
post.url,
"https://www.reddit.com/r/Python/comments/hpr28u/i_am_a_medical_student_and_i_recently_programmed/",
)
def test_empty_batch(self):
self.mocked_parse.side_effect = (empty_mock, empty_mock)
rules = (
(subreddit,)
for subreddit in SubredditFactory.create_batch(
user__reddit_access_token=str(uuid4()),
user__reddit_refresh_token=str(uuid4()),
enabled=True,
size=2,
)
)
collector = RedditCollector()
collector.collect(rules=rules)
self.assertEqual(Post.objects.count(), 0)
for subreddit in rules:
with self.subTest(subreddit=subreddit):
self.assertEqual(subreddit.succeeded, True)
self.assertEqual(subreddit.last_run, datetime.now(tz=timezone.utc))
self.assertEqual(subreddit.error, None)
def test_not_found(self):
self.mocked_fetch.side_effect = StreamNotFoundException
rule = SubredditFactory(
user__reddit_access_token=str(uuid4()),
user__reddit_refresh_token=str(uuid4()),
enabled=True,
)
collector = RedditCollector()
collector.collect(rules=((rule,),))
self.assertEqual(Post.objects.count(), 0)
self.assertEqual(rule.succeeded, False)
self.assertEqual(rule.error, "Stream not found")
@patch("newsreader.news.collection.reddit.RedditTokenTask")
def test_denied(self, mocked_task):
self.mocked_fetch.side_effect = StreamDeniedException
rule = SubredditFactory(
user__reddit_access_token=str(uuid4()),
user__reddit_refresh_token=str(uuid4()),
enabled=True,
)
collector = RedditCollector()
collector.collect(rules=((rule,),))
self.assertEqual(Post.objects.count(), 0)
self.assertEqual(rule.succeeded, False)
self.assertEqual(rule.error, "Stream does not have sufficient permissions")
mocked_task.delay.assert_called_once_with(rule.user.pk)
def test_forbidden(self):
self.mocked_fetch.side_effect = StreamForbiddenException
rule = SubredditFactory(
user__reddit_access_token=str(uuid4()),
user__reddit_refresh_token=str(uuid4()),
enabled=True,
)
collector = RedditCollector()
collector.collect(rules=((rule,),))
self.assertEqual(Post.objects.count(), 0)
self.assertEqual(rule.succeeded, False)
self.assertEqual(rule.error, "Stream forbidden")
def test_timed_out(self):
self.mocked_fetch.side_effect = StreamTimeOutException
rule = SubredditFactory(
user__reddit_access_token=str(uuid4()),
user__reddit_refresh_token=str(uuid4()),
enabled=True,
)
collector = RedditCollector()
collector.collect(rules=((rule,),))
self.assertEqual(Post.objects.count(), 0)
self.assertEqual(rule.succeeded, False)
self.assertEqual(rule.error, "Stream timed out")

File diff suppressed because it is too large

@ -1,144 +0,0 @@
from json.decoder import JSONDecodeError
from unittest.mock import patch
from uuid import uuid4
from django.test import TestCase
from newsreader.accounts.tests.factories import UserFactory
from newsreader.news.collection.exceptions import (
StreamDeniedException,
StreamException,
StreamForbiddenException,
StreamNotFoundException,
StreamParseException,
StreamTimeOutException,
)
from newsreader.news.collection.reddit import RedditStream
from newsreader.news.collection.tests.factories import SubredditFactory
from newsreader.news.collection.tests.reddit.stream.mocks import simple_mock
class RedditStreamTestCase(TestCase):
def setUp(self):
self.maxDiff = None
self.patched_fetch = patch("newsreader.news.collection.reddit.fetch")
self.mocked_fetch = self.patched_fetch.start()
def tearDown(self):
patch.stopall()
def test_simple_stream(self):
self.mocked_fetch.return_value.json.return_value = simple_mock
access_token = str(uuid4())
user = UserFactory(reddit_access_token=access_token)
subreddit = SubredditFactory(user=user)
stream = RedditStream(subreddit)
data, stream = stream.read()
self.assertEquals(data, simple_mock)
self.assertEquals(stream, stream)
self.mocked_fetch.assert_called_once_with(
subreddit.url, headers={"Authorization": f"bearer {access_token}"}
)
def test_stream_raises_exception(self):
self.mocked_fetch.side_effect = StreamException
access_token = str(uuid4())
user = UserFactory(reddit_access_token=access_token)
subreddit = SubredditFactory(user=user)
stream = RedditStream(subreddit)
with self.assertRaises(StreamException):
stream.read()
self.mocked_fetch.assert_called_once_with(
subreddit.url, headers={"Authorization": f"bearer {access_token}"}
)
def test_stream_raises_denied_exception(self):
self.mocked_fetch.side_effect = StreamDeniedException
access_token = str(uuid4())
user = UserFactory(reddit_access_token=access_token)
subreddit = SubredditFactory(user=user)
stream = RedditStream(subreddit)
with self.assertRaises(StreamDeniedException):
stream.read()
self.mocked_fetch.assert_called_once_with(
subreddit.url, headers={"Authorization": f"bearer {access_token}"}
)
def test_stream_raises_not_found_exception(self):
self.mocked_fetch.side_effect = StreamNotFoundException
access_token = str(uuid4())
user = UserFactory(reddit_access_token=access_token)
subreddit = SubredditFactory(user=user)
stream = RedditStream(subreddit)
with self.assertRaises(StreamNotFoundException):
stream.read()
self.mocked_fetch.assert_called_once_with(
subreddit.url, headers={"Authorization": f"bearer {access_token}"}
)
def test_stream_raises_time_out_exception(self):
self.mocked_fetch.side_effect = StreamTimeOutException
access_token = str(uuid4())
user = UserFactory(reddit_access_token=access_token)
subreddit = SubredditFactory(user=user)
stream = RedditStream(subreddit)
with self.assertRaises(StreamTimeOutException):
stream.read()
self.mocked_fetch.assert_called_once_with(
subreddit.url, headers={"Authorization": f"bearer {access_token}"}
)
def test_stream_raises_forbidden_exception(self):
self.mocked_fetch.side_effect = StreamForbiddenException
access_token = str(uuid4())
user = UserFactory(reddit_access_token=access_token)
subreddit = SubredditFactory(user=user)
stream = RedditStream(subreddit)
with self.assertRaises(StreamForbiddenException):
stream.read()
self.mocked_fetch.assert_called_once_with(
subreddit.url, headers={"Authorization": f"bearer {access_token}"}
)
def test_stream_raises_parse_exception(self):
self.mocked_fetch.return_value.json.side_effect = JSONDecodeError(
"No json found", "{}", 5
)
access_token = str(uuid4())
user = UserFactory(reddit_access_token=access_token)
subreddit = SubredditFactory(user=user)
stream = RedditStream(subreddit)
with self.assertRaises(StreamParseException):
stream.read()
self.mocked_fetch.assert_called_once_with(
subreddit.url, headers={"Authorization": f"bearer {access_token}"}
)

@ -1,142 +0,0 @@
from datetime import timedelta
from django.test import TestCase
from django.utils import timezone
from freezegun import freeze_time
from newsreader.accounts.tests.factories import UserFactory
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.reddit import RedditScheduler
from newsreader.news.collection.tests.factories import CollectionRuleFactory
@freeze_time("2019-10-30 12:30:00")
class RedditSchedulerTestCase(TestCase):
def test_simple(self):
user_1 = UserFactory(
reddit_access_token="1231414", reddit_refresh_token="5235262"
)
user_2 = UserFactory(
reddit_access_token="3414777", reddit_refresh_token="3423425"
)
user_1_rules = [
CollectionRuleFactory(
user=user_1,
type=RuleTypeChoices.subreddit,
last_run=timezone.now() - timedelta(days=4),
enabled=True,
),
CollectionRuleFactory(
user=user_1,
type=RuleTypeChoices.subreddit,
last_run=timezone.now() - timedelta(days=3),
enabled=True,
),
CollectionRuleFactory(
user=user_1,
type=RuleTypeChoices.subreddit,
last_run=timezone.now() - timedelta(days=2),
enabled=True,
),
]
user_2_rules = [
CollectionRuleFactory(
user=user_2,
type=RuleTypeChoices.subreddit,
last_run=timezone.now() - timedelta(days=4),
enabled=True,
),
CollectionRuleFactory(
user=user_2,
type=RuleTypeChoices.subreddit,
last_run=timezone.now() - timedelta(days=3),
enabled=True,
),
CollectionRuleFactory(
user=user_2,
type=RuleTypeChoices.subreddit,
last_run=timezone.now() - timedelta(days=2),
enabled=True,
),
]
scheduler = RedditScheduler()
scheduled_subreddits = scheduler.get_scheduled_rules()
user_1_batch = [subreddit.pk for subreddit in scheduled_subreddits[0]]
self.assertIn(user_1_rules[0].pk, user_1_batch)
self.assertIn(user_1_rules[1].pk, user_1_batch)
self.assertIn(user_1_rules[2].pk, user_1_batch)
user_2_batch = [subreddit.pk for subreddit in scheduled_subreddits[1]]
self.assertIn(user_2_rules[0].pk, user_2_batch)
self.assertIn(user_2_rules[1].pk, user_2_batch)
self.assertIn(user_2_rules[2].pk, user_2_batch)
def test_max_amount(self):
users = UserFactory.create_batch(
reddit_access_token="1231414", reddit_refresh_token="5235262", size=5
)
nested_rules = [
CollectionRuleFactory.create_batch(
name=f"rule-{index}",
type=RuleTypeChoices.subreddit,
last_run=timezone.now() - timedelta(seconds=index),
enabled=True,
user=user,
size=15,
)
for index, user in enumerate(users)
]
rules = [rule for rule_list in nested_rules for rule in rule_list]
scheduler = RedditScheduler()
scheduled_subreddits = [
subreddit.pk
for batch in scheduler.get_scheduled_rules()
for subreddit in batch
]
for rule in rules[16:76]:
with self.subTest(rule=rule):
self.assertIn(rule.pk, scheduled_subreddits)
for rule in rules[0:15]:
with self.subTest(rule=rule):
self.assertNotIn(rule.pk, scheduled_subreddits)
def test_max_user_amount(self):
user = UserFactory(
reddit_access_token="1231414", reddit_refresh_token="5235262"
)
rules = [
CollectionRuleFactory(
name=f"rule-{index}",
type=RuleTypeChoices.subreddit,
last_run=timezone.now() - timedelta(seconds=index),
enabled=True,
user=user,
)
for index in range(1, 17)
]
scheduler = RedditScheduler()
scheduled_subreddits = [
subreddit.pk
for batch in scheduler.get_scheduled_rules()
for subreddit in batch
]
for rule in rules[1:16]:
with self.subTest(rule=rule):
self.assertIn(rule.pk, scheduled_subreddits)
self.assertNotIn(rules[0].pk, scheduled_subreddits)

@ -88,17 +88,3 @@ class FeedUpdateViewTestCase(CollectionRuleViewTestCase, TestCase):
        self.rule.refresh_from_db()
        self.assertEqual(self.rule.category, None)

    def test_rules_only(self):
        rule = FeedFactory(
            name="Python",
            url="https://reddit.com/r/python",
            user=self.user,
            category=self.category,
            type=RuleTypeChoices.subreddit,
        )
        url = reverse("news:collection:feed-update", kwargs={"pk": rule.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)

@ -1,133 +0,0 @@
from django.test import TestCase
from django.urls import reverse
from django.utils.translation import gettext as _
from newsreader.news.collection.choices import RuleTypeChoices
from newsreader.news.collection.models import CollectionRule
from newsreader.news.collection.reddit import REDDIT_API_URL, REDDIT_URL
from newsreader.news.collection.tests.factories import SubredditFactory
from newsreader.news.collection.tests.views.base import CollectionRuleViewTestCase
from newsreader.news.core.tests.factories import CategoryFactory
class SubRedditCreateViewTestCase(CollectionRuleViewTestCase, TestCase):
def setUp(self):
super().setUp()
self.form_data = {
"name": "new rule",
"url": f"{REDDIT_API_URL}/r/aww",
"category": str(self.category.pk),
"reddit_allow_nfsw": False,
"reddit_allow_spoiler": False,
"reddit_allow_viewed": True,
"reddit_upvotes_min": 0,
"reddit_comments_min": 0,
}
self.url = reverse("news:collection:subreddit-create")
def test_creation(self):
response = self.client.post(self.url, self.form_data)
self.assertEqual(response.status_code, 302)
rule = CollectionRule.objects.get(name="new rule")
self.assertEqual(rule.type, RuleTypeChoices.subreddit)
self.assertEqual(rule.url, f"{REDDIT_API_URL}/r/aww")
self.assertEqual(rule.favicon, None)
self.assertEqual(rule.category.pk, self.category.pk)
self.assertEqual(rule.user.pk, self.user.pk)
def test_regular_reddit_url(self):
self.form_data.update(url=f"{REDDIT_URL}/r/aww")
response = self.client.post(self.url, self.form_data)
self.assertContains(response, _("This does not look like an Reddit API URL"))
class SubRedditUpdateViewTestCase(CollectionRuleViewTestCase, TestCase):
def setUp(self):
super().setUp()
self.rule = SubredditFactory(
name="Python",
url=f"{REDDIT_API_URL}/r/python.json",
user=self.user,
category=self.category,
type=RuleTypeChoices.subreddit,
)
self.url = reverse(
"news:collection:subreddit-update", kwargs={"pk": self.rule.pk}
)
self.form_data = {
"name": self.rule.name,
"url": self.rule.url,
"category": str(self.category.pk),
"reddit_allow_nfsw": False,
"reddit_allow_spoiler": False,
"reddit_allow_viewed": True,
"reddit_upvotes_min": 0,
"reddit_comments_min": 0,
}
def test_name_change(self):
self.form_data.update(name="Python 2")
response = self.client.post(self.url, self.form_data)
self.assertEqual(response.status_code, 302)
self.rule.refresh_from_db()
self.assertEqual(self.rule.name, "Python 2")
def test_category_change(self):
new_category = CategoryFactory(user=self.user)
self.form_data.update(category=new_category.pk)
response = self.client.post(self.url, self.form_data)
self.assertEqual(response.status_code, 302)
self.rule.refresh_from_db()
self.assertEqual(self.rule.category.pk, new_category.pk)
def test_subreddit_rules_only(self):
rule = SubredditFactory(
name="Fake subreddit",
url="https://leddit.com/r/python",
user=self.user,
category=self.category,
type=RuleTypeChoices.feed,
)
url = reverse("news:collection:subreddit-update", kwargs={"pk": rule.pk})
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_url_change(self):
self.form_data.update(name="aww", url=f"{REDDIT_API_URL}/r/aww")
response = self.client.post(self.url, self.form_data)
self.assertEqual(response.status_code, 302)
rule = CollectionRule.objects.get(name="aww")
self.assertEqual(rule.type, RuleTypeChoices.subreddit)
self.assertEqual(rule.url, f"{REDDIT_API_URL}/r/aww")
self.assertEqual(rule.favicon, None)
self.assertEqual(rule.category.pk, self.category.pk)
self.assertEqual(rule.user.pk, self.user.pk)
def test_regular_reddit_url(self):
self.form_data.update(url=f"{REDDIT_URL}/r/aww")
response = self.client.post(self.url, self.form_data)
self.assertContains(response, _("This does not look like an Reddit API URL"))

@ -14,8 +14,6 @@ from newsreader.news.collection.views import (
    FeedCreateView,
    FeedUpdateView,
    OPMLImportView,
    SubRedditCreateView,
    SubRedditUpdateView,
)
@ -49,15 +47,4 @@ urlpatterns = [
        name="rules-disable",
    ),
    path("rules/import/", login_required(OPMLImportView.as_view()), name="import"),
    # Reddit
    path(
        "subreddits/create/",
        login_required(SubRedditCreateView.as_view()),
        name="subreddit-create",
    ),
    path(
        "subreddits/<int:pk>/",
        login_required(SubRedditUpdateView.as_view()),
        name="subreddit-update",
    ),
]

@ -60,6 +60,6 @@ def truncate_text(cls, field_name, value):
            return value
        if len(value) > max_length:
            return f"{value[:max_length - 1]}"
            return f"{value[: max_length - 1]}"
        return value
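
The slice change above is whitespace-only; both spellings take value[:max_length - 1]. For context, a minimal sketch of a comparable truncation helper, assuming max_length comes from the Django model field and that a trailing ellipsis is appended (as test expectations such as "TheQuantumZero…" suggest); this is an illustration, not the repository's truncate_text implementation:

def truncate_to_field(model_cls, field_name, value):
    # Illustrative: trim a string to fit a CharField's max_length,
    # reserving one character for the trailing ellipsis.
    if value is None:
        return value
    max_length = model_cls._meta.get_field(field_name).max_length
    if max_length and len(value) > max_length:
        return f"{value[:max_length - 1]}…"
    return value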

@ -3,10 +3,6 @@ from newsreader.news.collection.views.feed import (
    FeedUpdateView,
    OPMLImportView,
)
from newsreader.news.collection.views.reddit import (
    SubRedditCreateView,
    SubRedditUpdateView,
)
from newsreader.news.collection.views.rules import (
    CollectionRuleBulkDeleteView,
    CollectionRuleBulkDisableView,
@ -19,8 +15,6 @@ __all__ = [
    "FeedCreateView",
    "FeedUpdateView",
    "OPMLImportView",
    "SubRedditCreateView",
    "SubRedditUpdateView",
    "CollectionRuleBulkDeleteView",
    "CollectionRuleBulkDisableView",
    "CollectionRuleBulkEnableView",

Some files were not shown because too many files have changed in this diff