diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..cd0b0362 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +node_modules/ +static/ +docker/jschan/Dockerfile +docker/jschan/Dockerfile-reset +docker/nginx/Dockerfile +docker/static/ +tools/ +gulp/res/js/socket.io.js diff --git a/.gitignore b/.gitignore index 17f09b2b..6a77ea69 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ backup.sh configs/*.json configs/*.js static/* +docker/static/* gulp/res/js/socket.io.js /gulp/res/css/codethemes gulp/res/css/locals.css diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d3f541bb..64e7e8e6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -31,5 +31,28 @@ Read the code to understand, but basically: * TAB for indentation. * Please include comments. +## Running tests (WIP) + +Make sure these still pass after your changes, or adjust them to meet the new expected results. + +There is a "jschan-test" service in the `docker-compose.yml` file that will run all the tests in a jschan instance using docker. See the advanced section of the installation guide for instructions on how to use it. + +You can also run them locally if you have an instance set up (or for quickly running unit tests): + +```bash +#unit tests +npm run test +# OR npm run test:unit + +#integration tests +TEST_ADMIN_PASSWORD= npm run test:integration + +#all tests +npm run test:all + +#specific test(s) +npm run test:all +``` + Thanks, Tom diff --git a/INSTALLATION.md b/INSTALLATION.md index eba22f8f..36467e57 100644 --- a/INSTALLATION.md +++ b/INSTALLATION.md @@ -242,3 +242,21 @@ To build all css files, run `gulp css`. For some situations, such as adding or r For detecting and automatically updating Tor exit node lists, see [tools/update_tor_exits.sh](tools/update_tor_exits.sh) For updating the GeoIP database for nginx, see [tools/update_geoip.sh](tools/update_geoip.sh) + + +#### Docker + +Experimental, strictly for development only. Everything is driven by the `docker-compose.yml` in the repository root.
+ +Basically: + +```bash +docker-compose up -d mongodb redis + +#on the first run, or to "gulp reset" later: +docker-compose up jschan-reset + +docker-compose up -d jschan + +docker-compose up -d nginx +``` diff --git a/controllers/forms/globalsettings.js b/controllers/forms/globalsettings.js index 3e3bcc78..139e84e6 100644 --- a/controllers/forms/globalsettings.js +++ b/controllers/forms/globalsettings.js @@ -164,12 +164,12 @@ module.exports = { { result: numberBody(req.body.board_defaults_tph_trigger_action, 0, 4), expected: true, error: 'Board default tph trigger action must be a number from 0-4' }, { result: numberBody(req.body.board_defaults_captcha_reset, 0, 2), expected: true, error: 'Board defaults captcha reset must be a number from 0-2' }, { result: numberBody(req.body.board_defaults_lock_reset, 0, 2), expected: true, error: 'Board defaults lock reset must be a number from 0-2' }, - { result: numberBodyVariable(req.body.board_defaults_reply_limit, req.body.global_limits_reply_limit_min, globalLimits.replyLimit.min, req.body.global_limits_reply_limit_max, globalLimits.replyLimit.max), expected: true, error: `Board defaults reply limit must be within global limits` }, - { result: numberBodyVariable(req.body.board_defaults_thread_limit, req.body.global_limits_thread_limit_min, globalLimits.threadLimit.min, req.body.global_limits_thread_limit_max, globalLimits.threadLimit.max), expected: true, error: `Board defaults thread limit must be within global limits` }, - { result: numberBodyVariable(req.body.board_defaults_bump_limit, req.body.global_limits_bump_limit_min, globalLimits.bumpLimit.min, req.body.global_limits_bump_limit_max, globalLimits.bumpLimit.max), expected: true, error: `Board defaults bump limit must be within global limits` }, - { result: numberBodyVariable(req.body.board_defaults_max_files, 0, 0, req.body.global_limits_post_files_max, globalLimits.postFiles.max), expected: true, error: `Board defaults max files must be within global limits` }, - { result: numberBodyVariable(req.body.board_defaults_max_thread_message_length, 0, 0, req.body.global_limits_field_length_message, globalLimits.fieldLength.message), expected: true, error: `Board defaults max thread message length must be within global limits` }, - { result: numberBodyVariable(req.body.board_defaults_max_reply_message_length, 0, 0, req.body.global_limits_field_length_message, globalLimits.fieldLength.message), expected: true, error: `Board defaults max reply message length must be within global limits` }, + { result: numberBodyVariable(req.body.board_defaults_reply_limit, globalLimits.replyLimit.min, req.body.global_limits_reply_limit_min, globalLimits.replyLimit.max, req.body.global_limits_reply_limit_max), expected: true, error: `Board defaults reply limit must be within global limits` }, + { result: numberBodyVariable(req.body.board_defaults_thread_limit, globalLimits.threadLimit.min, req.body.global_limits_thread_limit_min, globalLimits.threadLimit.max, req.body.global_limits_thread_limit_max), expected: true, error: `Board defaults thread limit must be within global limits` }, + { result: numberBodyVariable(req.body.board_defaults_bump_limit, globalLimits.bumpLimit.min, req.body.global_limits_bump_limit_min, globalLimits.bumpLimit.max, req.body.global_limits_bump_limit_max), expected: true, error: `Board defaults bump limit must be within global limits` }, + { result: numberBodyVariable(req.body.board_defaults_max_files, 0, 0, globalLimits.postFiles.max, req.body.global_limits_post_files_max), expected: 
true, error: `Board defaults max files must be within global limits` }, + { result: numberBodyVariable(req.body.board_defaults_max_thread_message_length, 0, 0, globalLimits.fieldLength.message, req.body.global_limits_field_length_message), expected: true, error: `Board defaults max thread message length must be within global limits` }, + { result: numberBodyVariable(req.body.board_defaults_max_reply_message_length, 0, 0, globalLimits.fieldLength.message, req.body.global_limits_field_length_message), expected: true, error: `Board defaults max reply message length must be within global limits` }, { result: numberBody(req.body.board_defaults_min_thread_message_length), expected: true, error: 'Board defaults min thread message length must be a number' }, { result: numberBody(req.body.board_defaults_min_reply_message_length), expected: true, error: 'Board defaults min reply message length must be a number' }, { result: minmaxBody(req.body.board_defaults_min_thread_message_length, req.body.board_defaults_max_thread_message_length), expected: true, error: 'Board defaults thread message length min must be less than max' }, diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..8f7adc48 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,70 @@ +version: "3.5" +services: + + redis: + command: redis-server --requirepass changeme + image: redis:alpine + + mongodb: + image: mongo:latest + environment: + - MONGO_INITDB_ROOT_USERNAME=jschan + - MONGO_INITDB_ROOT_PASSWORD=changeme + + nginx: + build: + context: . + dockerfile: ./docker/nginx/Dockerfile + args: + ENABLED_MODULES: geoip + ports: + - "80:80" + volumes: + - ./docker/static/:/path/to/jschan/static/ + depends_on: + - jschan + + jschan: + build: + context: . + dockerfile: ./docker/jschan/Dockerfile + network: jschan_default + environment: + - NODE_ENV=development + - JSCHAN_IP=0.0.0.0 + - NO_CAPTCHA=1 + - MONGO_USERNAME=jschan + - MONGO_PASSWORD=changeme + - REDIS_PASSWORD=changeme + - COOKIE_SECRET=changeme + - TRIPCODE_SECRET=changeme + - IP_HASH_SECRET=changeme + - POST_PASSWORD_SECRET=changeme + - GOOGLE_SITEKEY=changeme + - GOOGLE_SECRETKEY=changeme + - HCAPTCHA_SITEKEY=10000000-ffff-ffff-ffff-000000000001 + - HCAPTCHA_SECRETKEY=0x0000000000000000000000000000000000000000 + volumes: + - ./docker/static:/opt/static/ + depends_on: + - redis + - mongodb + + jschan-reset: + build: + context: . + dockerfile: ./docker/jschan/Dockerfile-reset + network: jschan_default + environment: + - MONGO_USERNAME=jschan + - MONGO_PASSWORD=changeme + - REDIS_PASSWORD=changeme + volumes: + - ./docker/static:/opt/static/ + depends_on: + - redis + - mongodb + +networks: + default: + name: jschan_default diff --git a/docker/jschan/Dockerfile b/docker/jschan/Dockerfile new file mode 100644 index 00000000..a6188dda --- /dev/null +++ b/docker/jschan/Dockerfile @@ -0,0 +1,23 @@ +FROM node:16 + +RUN apt-get update -y +RUN apt-get install ffmpeg imagemagick graphicsmagick -y + +WORKDIR /opt + +COPY . . 
+ +RUN npm install + +RUN npm install -g pm2 gulp + +COPY ./docker/jschan/secrets.js ./configs/secrets.js + +#i fucking hate docker +ENV MONGO_USERNAME jschan +ENV MONGO_PASSWORD changeme +ENV REDIS_PASSWORD changeme + +RUN gulp generate-favicon + +CMD ["/bin/sh", "-c", "gulp; pm2-runtime start ecosystem.config.js"] diff --git a/docker/jschan/Dockerfile-reset b/docker/jschan/Dockerfile-reset new file mode 100644 index 00000000..d7edeffb --- /dev/null +++ b/docker/jschan/Dockerfile-reset @@ -0,0 +1,20 @@ +FROM node:16 + +WORKDIR /opt + +COPY . . + +RUN npm install + +RUN npm i -g pm2 gulp + +COPY ./docker/jschan/secrets.js ./configs/secrets.js + +#i fucking hate docker +ENV MONGO_USERNAME jschan +ENV MONGO_PASSWORD changeme +ENV REDIS_PASSWORD changeme + +RUN gulp generate-favicon + +CMD ["/bin/sh", "-c", "gulp reset; gulp"] diff --git a/docker/jschan/secrets.js b/docker/jschan/secrets.js new file mode 100644 index 00000000..4f571d19 --- /dev/null +++ b/docker/jschan/secrets.js @@ -0,0 +1,40 @@ +module.exports = { + + //mongodb connection string + dbURL: `mongodb://${process.env.MONGO_USERNAME}:${process.env.MONGO_PASSWORD}@mongodb:27017`, + + //database name + dbName: 'jschan', + + //redis connection info + redis: { + host: 'redis', + port: '6379', + password: process.env.REDIS_PASSWORD, + }, + + //backend webserver port + port: 7000, + + //secrets/salts for various things + cookieSecret: process.env.COOKIE_SECRET, + tripcodeSecret: process.env.TRIPCODE_SECRET, + ipHashSecret: process.env.IP_HASH_SECRET, + postPasswordSecret: process.env.POST_PASSWORD_SECRET, + + //keys for google recaptcha + google: { + siteKey: process.env.GOOGLE_SITEKEY, + secretKey: process.env.GOOGLE_SECRETKEY, + }, + + //keys for hcaptcha + hcaptcha: { + siteKey: process.env.HCAPTCHA_SITEKEY, + secretKey: process.env.HCAPTCHA_SECRETKEY, + }, + + //enable debug logging + debugLogs: true, + +}; diff --git a/docker/nginx/Dockerfile b/docker/nginx/Dockerfile new file mode 100644 index 00000000..e5495f02 --- /dev/null +++ b/docker/nginx/Dockerfile @@ -0,0 +1,88 @@ +FROM nginx:mainline as builder + +ARG ENABLED_MODULES + +RUN set -ex \ + && if [ "$ENABLED_MODULES" = "" ]; then \ + echo "No additional modules enabled, exiting"; \ + exit 1; \ + fi + +#COPY ./ /modules/ + +RUN set -ex \ + && apt update \ + && apt install -y --no-install-suggests --no-install-recommends \ + patch make wget mercurial devscripts debhelper dpkg-dev \ + quilt lsb-release build-essential libxml2-utils xsltproc \ + equivs git g++ \ + && hg clone -r ${NGINX_VERSION}-${PKG_RELEASE%%~*} https://hg.nginx.org/pkg-oss/ \ + && cd pkg-oss \ + && mkdir /tmp/packages \ + && for module in $ENABLED_MODULES; do \ + echo "Building $module for nginx-$NGINX_VERSION"; \ + if [ -d /modules/$module ]; then \ + echo "Building $module from user-supplied sources"; \ + # check if module sources file is there and not empty + if [ ! 
-s /modules/$module/source ]; then \ + echo "No source file for $module in modules/$module/source, exiting"; \ + exit 1; \ + fi; \ + # some modules require build dependencies + if [ -f /modules/$module/build-deps ]; then \ + echo "Installing $module build dependencies"; \ + apt update && apt install -y --no-install-suggests --no-install-recommends $(cat /modules/$module/build-deps | xargs); \ + fi; \ + # if a module has a build dependency that is not in a distro, provide a + # shell script to fetch/build/install those + # note that shared libraries produced as a result of this script will + # not be copied from the builder image to the main one so build static + if [ -x /modules/$module/prebuild ]; then \ + echo "Running prebuild script for $module"; \ + /modules/$module/prebuild; \ + fi; \ + /pkg-oss/build_module.sh -v $NGINX_VERSION -f -y -o /tmp/packages -n $module $(cat /modules/$module/source); \ + BUILT_MODULES="$BUILT_MODULES $(echo $module | tr '[A-Z]' '[a-z]' | tr -d '[/_\-\.\t ]')"; \ + elif make -C /pkg-oss/debian list | grep -P "^$module\s+\d" > /dev/null; then \ + echo "Building $module from pkg-oss sources"; \ + cd /pkg-oss/debian; \ + make rules-module-$module BASE_VERSION=$NGINX_VERSION NGINX_VERSION=$NGINX_VERSION; \ + mk-build-deps --install --tool="apt-get -o Debug::pkgProblemResolver=yes --no-install-recommends --yes" debuild-module-$module/nginx-$NGINX_VERSION/debian/control; \ + make module-$module BASE_VERSION=$NGINX_VERSION NGINX_VERSION=$NGINX_VERSION; \ + find ../../ -maxdepth 1 -mindepth 1 -type f -name "*.deb" -exec mv -v {} /tmp/packages/ \;; \ + BUILT_MODULES="$BUILT_MODULES $module"; \ + else \ + echo "Don't know how to build $module module, exiting"; \ + exit 1; \ + fi; \ + done \ + && echo "BUILT_MODULES=\"$BUILT_MODULES\"" > /tmp/packages/modules.env + +FROM nginx:mainline +COPY --from=builder /tmp/packages /tmp/packages +RUN set -ex \ + && apt update \ + && apt-get install wget -y \ + && . 
/tmp/packages/modules.env \ + && for module in $BUILT_MODULES; do \ + apt install --no-install-suggests --no-install-recommends -y /tmp/packages/nginx-module-${module}_${NGINX_VERSION}*.deb; \ + done \ + && rm -rf /tmp/packages \ + && rm -rf /var/lib/apt/lists/ + +RUN mkdir /usr/share/GeoIP +RUN wget https://dl.miyuru.lk/geoip/dbip/country/dbip.dat.gz +RUN gunzip dbip.dat.gz +RUN mv dbip.dat /usr/share/GeoIP/GeoIP.dat + +RUN rm /etc/nginx/conf.d/default.conf + +COPY ./docker/nginx/nginx.conf /etc/nginx/nginx.conf + +COPY ./docker/nginx/jschan.conf /etc/nginx/conf.d/ + +COPY ./configs/nginx/snippets/ /etc/nginx/snippets/ + +EXPOSE 80 + +CMD ["nginx", "-g", "daemon off;"] diff --git a/docker/nginx/jschan.conf b/docker/nginx/jschan.conf new file mode 100644 index 00000000..49c2b9e8 --- /dev/null +++ b/docker/nginx/jschan.conf @@ -0,0 +1,16 @@ +upstream chan { + server jschan:7000; +} + +server { + server_name _; + client_max_body_size 0; + + listen 80; + listen [::]:80; + + include /etc/nginx/snippets/security_headers.conf; + include /etc/nginx/snippets/error_pages.conf; + include /etc/nginx/snippets/jschan_clearnet_routes.conf; + include /etc/nginx/snippets/jschan_common_routes.conf; +} diff --git a/docker/nginx/nginx.conf b/docker/nginx/nginx.conf new file mode 100644 index 00000000..365a8659 --- /dev/null +++ b/docker/nginx/nginx.conf @@ -0,0 +1,44 @@ +load_module /etc/nginx/modules/ngx_http_geoip_module-debug.so; + +worker_processes auto; +pid /run/nginx.pid; + +events { + worker_connections 1000; +} + +http { + include /etc/nginx/mime.types; + default_type application/octet-stream; + geoip_country /usr/share/GeoIP/GeoIP.dat; + + map_hash_max_size 4096; + map_hash_bucket_size 256; + + aio threads; + sendfile on; + tcp_nopush on; + tcp_nodelay on; + server_tokens off; + types_hash_max_size 2048; + server_names_hash_bucket_size 128; + client_max_body_size 0; + + #proxy_request_buffering off; + + log_format custom '[$time_local] $remote_addr $status "$request" "$http_referer" "$http_user_agent" $bytes_sent'; + access_log /var/log/nginx/access.log custom; + error_log /var/log/nginx/error.log; + + gzip on; + #gzip_vary off; + gzip_comp_level 6; + gzip_proxied any; + gzip_types text/plain text/css text/js text/xml text/javascript image/x-icon application/javascript application/json application/xml application/rss+xml image/svg+xml; + + ssl_protocols TLSv1.2 TLSv1.3; # Dropping SSLv3, ref: POODLE + ssl_prefer_server_ciphers on; + + include /etc/nginx/conf.d/*; + +} diff --git a/helpers/checks/captcha.js b/helpers/checks/captcha.js index d4f57fa8..9e54aef0 100644 --- a/helpers/checks/captcha.js +++ b/helpers/checks/captcha.js @@ -10,6 +10,10 @@ const { Captchas } = require(__dirname+'/../../db/') module.exports = async (captchaInput, captchaId) => { + if (process.env.NO_CAPTCHA) { + return true; + } + const { captchaOptions } = config.get; //check if captcha field in form is valid diff --git a/helpers/datearray.js b/helpers/datearray.js deleted file mode 100644 index eea09017..00000000 --- a/helpers/datearray.js +++ /dev/null @@ -1,12 +0,0 @@ -'use strict'; - -//https://stackoverflow.com/a/4413721 -module.exports = (startDate, stopDate) => { - const dateArray = new Array(); - let currentDate = startDate; - while (currentDate <= stopDate) { - dateArray.push(new Date (currentDate.valueOf())); - currentDate.setDate(currentDate.getDate() + 1); - } - return dateArray; -} diff --git a/helpers/decodequeryip.js b/helpers/decodequeryip.js index 644531e0..d823675a 100644 --- a/helpers/decodequeryip.js +++ 
b/helpers/decodequeryip.js @@ -5,7 +5,7 @@ const escapeRegExp = require(__dirname+'/escaperegexp.js') , Permissions = require(__dirname+'/permissions.js'); module.exports = (query, permissions) => { - if (query.ip && typeof query.ip === 'string') { + if (query && query.ip && typeof query.ip === 'string') { const decoded = decodeURIComponent(query.ip); //if is IP but no permission, return null if (isIP(decoded) && !permissions.get(Permissions.VIEW_RAW_IP)) { diff --git a/helpers/decodequeryip.test.js b/helpers/decodequeryip.test.js new file mode 100644 index 00000000..f23b4ad6 --- /dev/null +++ b/helpers/decodequeryip.test.js @@ -0,0 +1,25 @@ +const decodeQueryIp = require('./decodequeryip.js'); +const Permission = require('./permission.js'); +const Permissions = require('./permissions.js'); +const ROOT = new Permission(); +ROOT.setAll(Permission.allPermissions); +const NO_PERMISSION = new Permission(); + +describe('decode query ip', () => { + + const cases = [ + { in: { query: null, permission: ROOT }, out: null }, + { in: { query: {}, permission: ROOT }, out: null }, + { in: { query: { ip: '10.0.0.1' }, permission: ROOT }, out: '10.0.0.1' }, + { in: { query: { ip: '10.0.0.1' }, permission: NO_PERMISSION }, out: null }, + { in: { query: { ip: '8s7AGX4n.qHsw9mp.uw54Nfl.IP' }, permission: ROOT }, out: '8s7AGX4n.qHsw9mp.uw54Nfl.IP' }, + { in: { query: { ip: '8s7AGX4n.qHsw9mp.uw54Nfl.IP' }, permission: NO_PERMISSION }, out: '8s7AGX4n.qHsw9mp.uw54Nfl.IP' }, + ]; + + for(let i in cases) { + test(`should output ${cases[i].out} for an input of ${cases[i].in}`, () => { + expect(decodeQueryIp(cases[i].in.query, cases[i].in.permission)).toStrictEqual(cases[i].out) + }); + } + +}); diff --git a/helpers/escaperegexp.test.js b/helpers/escaperegexp.test.js new file mode 100644 index 00000000..4f5c69ba --- /dev/null +++ b/helpers/escaperegexp.test.js @@ -0,0 +1,17 @@ +const escapeRegExp = require('./escaperegexp.js'); + +describe('escape regular expression', () => { + + const cases = [ + { in: '', out: '' }, + { in: '/', out: '/' }, + { in: '.*+?^${}()|[]\\', out: '\\.\\*\\+\\?\\^\\$\\{\\}\\(\\)\\|\\[\\]\\\\' }, + ]; + + for(let i in cases) { + test(`should output ${cases[i].out} for an input of ${cases[i].in}`, () => { + expect(escapeRegExp(cases[i].in)).toStrictEqual(cases[i].out) + }); + } + +}); diff --git a/helpers/files/formatsize.js b/helpers/files/formatsize.js index 7c8f7bef..a19834db 100644 --- a/helpers/files/formatsize.js +++ b/helpers/files/formatsize.js @@ -7,6 +7,6 @@ module.exports = (bytes) => { if (bytes === 0) { return '0B'; } - const i = Math.floor(Math.log(bytes) / Math.log(k)); + const i = Math.min(sizes.length-1, Math.floor(Math.log(bytes) / Math.log(k))); return `${parseFloat((bytes / Math.pow(k, i)).toFixed(1))}${sizes[i]}`; }; diff --git a/helpers/files/formatsize.test.js b/helpers/files/formatsize.test.js new file mode 100644 index 00000000..905caec6 --- /dev/null +++ b/helpers/files/formatsize.test.js @@ -0,0 +1,19 @@ +const formatSize = require('./formatsize.js'); + +describe('formatSize() - convert bytes to human readable file size', () => { + const cases = [ + {in: 1024, out: "1KB"}, + {in: Math.pow(1024, 2), out: "1MB"}, + {in: Math.pow(1024, 3), out: "1GB"}, + {in: Math.pow(1024, 4), out: "1TB"}, + {in: Math.pow(1024, 5), out: "1024TB"}, + {in: Math.pow(1024, 3)+(Math.pow(1024, 2)*512), out: "1.5GB"}, + {in: 100, out: "100B"}, + {in: 0, out: "0B"}, + ]; + for(let i in cases) { + test(`should output ${cases[i].out} for an input of ${cases[i].in} bytes`, () => { + 
expect(formatSize(cases[i].in)).toBe(cases[i].out); + }); + } +}); diff --git a/helpers/pagequeryconverter.js b/helpers/pagequeryconverter.js index a7fdb75a..c1aa3e85 100644 --- a/helpers/pagequeryconverter.js +++ b/helpers/pagequeryconverter.js @@ -1,6 +1,7 @@ 'use strict'; module.exports = (query, limit) => { + query = query || {}; const nopage = { ...query }; delete nopage.page; const queryString = new URLSearchParams(nopage).toString(); diff --git a/helpers/pagequeryconverter.test.js b/helpers/pagequeryconverter.test.js new file mode 100644 index 00000000..a7014991 --- /dev/null +++ b/helpers/pagequeryconverter.test.js @@ -0,0 +1,23 @@ +const pageQueryConverter = require('./pagequeryconverter.js'); +const limit = 30; + +describe('page query converter', () => { + const cases = [ + { in: null, out: { offset: 0, "queryString": "", page: 1 } }, + { in: { }, out: { offset: 0, "queryString": "", page: 1 } }, + { in: { page: [1, 2, 3] }, out: { offset: 0, "queryString": "", page: 1 } }, + { in: { page: "test" }, out: { offset: 0, "queryString": "", page: 1 } }, + { in: { page: null }, out: { offset: 0, "queryString": "", page: 1 } }, + { in: { page: -1 }, out: { offset: 0, "queryString": "", page: 1 } }, + { in: { page: 0 }, out: { offset: 0, "queryString": "", page: 1 } }, + { in: { page: 1 }, out: { offset: 0, "queryString": "", page: 1 } }, + { in: { page: 5 }, out: { offset: limit*4, "queryString": "", page: 5 } }, + { in: { page: 10, other: "test" }, out: { offset: limit*9, "queryString": "other=test", page: 10 } }, + { in: { other: "test" }, out: { offset: 0, "queryString": "other=test", page: 1 } }, + ]; + for(let i in cases) { + test(`should contain ${cases[i].out} for an input of ${cases[i].in}`, () => { + expect(pageQueryConverter(cases[i].in, limit)).toStrictEqual(cases[i].out) + }); + } +}); diff --git a/helpers/paramconverter.js b/helpers/paramconverter.js index e8650a3e..5a588199 100644 --- a/helpers/paramconverter.js +++ b/helpers/paramconverter.js @@ -1,6 +1,6 @@ 'use strict'; -const { ObjectId } = require(__dirname+'/../db/db.js') +const { ObjectId } = require('mongodb') , timeFieldRegex = /^(?[\d]+y)?(?[\d]+mo)?(?[\d]+w)?(?[\d]+d)?(?[\d]+h)?(?[\d]+m)?(?[\d]+s)?$/ , timeUtils = require(__dirname+'/timeutils.js') , dynamicResponse = require(__dirname+'/dynamic.js') diff --git a/helpers/paramconverter.test.js b/helpers/paramconverter.test.js new file mode 100644 index 00000000..26af8a05 --- /dev/null +++ b/helpers/paramconverter.test.js @@ -0,0 +1,41 @@ +const paramConverter = require('./paramconverter.js'); +const { WEEK, DAY, HOUR } = require('./timeutils.js'); +/* +const defaultOptions = { + timeFields: [], + trimFields: [], + allowedArrays: [], + numberFields: [], + numberArrays: [], + objectIdParams: [], + objectIdFields: [], + objectIdArrays: [], + processThreadIdParam: false, + processDateParam: false, + processMessageLength: false, +}; +*/ + +describe('paramconverter', () => { + + const cases = [ + { + in: { options: { trimFields: ['username', 'password'] }, body: { username: 'trimmed ', password: 'trimmed ' } }, + out: { username: 'trimmed', password: 'trimmed' } + }, + { + in: { options: { timeFields: ['test'] }, body: { test: '1w2d3h' } }, + out: { test: WEEK+(2*DAY)+(3*HOUR) } + }, + //todo: add a bunch more + ]; + + for(let i in cases) { + test(`should output ${cases[i].out} for an input of ${cases[i].in}`, () => { + const converter = paramConverter(cases[i].in.options); + converter({ body: cases[i].in.body }, {}, () => {}); + 
expect(cases[i].in.body).toStrictEqual(cases[i].out); }); } }); diff --git a/helpers/permission.test.js b/helpers/permission.test.js new file mode 100644 index 00000000..22dd8c8f --- /dev/null +++ b/helpers/permission.test.js @@ -0,0 +1,73 @@ +const randomRange = require('./randomrange.js'); +const Permission = require('./permission.js'); +const Permissions = require('./permissions.js'); + +describe('testing permissions', () => { + + const NO_PERMISSION = new Permission(); + + const ANON = new Permission(); + ANON.setAll([ + Permissions.USE_MARKDOWN_PINKTEXT, Permissions.USE_MARKDOWN_GREENTEXT, Permissions.USE_MARKDOWN_BOLD, + Permissions.USE_MARKDOWN_UNDERLINE, Permissions.USE_MARKDOWN_STRIKETHROUGH, Permissions.USE_MARKDOWN_TITLE, + Permissions.USE_MARKDOWN_ITALIC, Permissions.USE_MARKDOWN_SPOILER, Permissions.USE_MARKDOWN_MONO, + Permissions.USE_MARKDOWN_CODE, Permissions.USE_MARKDOWN_DETECTED, Permissions.USE_MARKDOWN_LINK, + Permissions.USE_MARKDOWN_DICE, Permissions.USE_MARKDOWN_FORTUNE, Permissions.CREATE_BOARD, + Permissions.CREATE_ACCOUNT + ]); + + test('test a permission they have = true', () => { + expect(ANON.get(Permissions.CREATE_ACCOUNT)).toBe(true); + }); + + test('test a permission they dont have = false', () => { + expect(ANON.get(Permissions.ROOT)).toBe(false); + }); + + const BOARD_STAFF = new Permission(ANON.base64); + BOARD_STAFF.setAll([ + Permissions.MANAGE_BOARD_GENERAL, Permissions.MANAGE_BOARD_BANS, Permissions.MANAGE_BOARD_LOGS, + ]); + const BOARD_OWNER = new Permission(BOARD_STAFF.base64); + BOARD_OWNER.setAll([ + Permissions.MANAGE_BOARD_OWNER, Permissions.MANAGE_BOARD_STAFF, Permissions.MANAGE_BOARD_CUSTOMISATION, + Permissions.MANAGE_BOARD_SETTINGS, + ]); + + test('BO has all board perms', () => { + Permissions._MANAGE_BOARD_BITS.every(b => expect(BOARD_OWNER.get(b)).toBe(true)); + }); + + test('applyInheritance() gives BO all board perms as long as they have Permissions.MANAGE_BOARD_OWNER', () => { + BOARD_OWNER.setAll(Permissions._MANAGE_BOARD_BITS, false); + BOARD_OWNER.set(Permissions.MANAGE_BOARD_OWNER); + BOARD_OWNER.applyInheritance(); + Permissions._MANAGE_BOARD_BITS.every(b => expect(BOARD_OWNER.get(b)).toBe(true)); + }); + + const GLOBAL_STAFF = new Permission(BOARD_OWNER.base64); + GLOBAL_STAFF.setAll([ + Permissions.MANAGE_GLOBAL_GENERAL, Permissions.MANAGE_GLOBAL_BANS, Permissions.MANAGE_GLOBAL_LOGS, Permissions.MANAGE_GLOBAL_NEWS, + Permissions.MANAGE_GLOBAL_BOARDS, Permissions.MANAGE_GLOBAL_SETTINGS, Permissions.MANAGE_BOARD_OWNER, Permissions.BYPASS_FILTERS, + Permissions.BYPASS_BANS, Permissions.BYPASS_SPAMCHECK, Permissions.BYPASS_RATELIMITS, + ]); + const ADMIN = new Permission(GLOBAL_STAFF.base64); + ADMIN.setAll([ + Permissions.MANAGE_GLOBAL_ACCOUNTS, Permissions.MANAGE_GLOBAL_ROLES, Permissions.VIEW_RAW_IP, + ]); + const ROOT = new Permission(); + ROOT.setAll(Permission.allPermissions); + + test('root has all permissions', () => { + Permission.allPermissions.every(p => expect(ROOT.get(p)).toBe(true)); + }); + + test('applyInheritance() gives ROOT all permissions as long as they have Permissions.ROOT', () => { + NO_PERMISSION.set(Permissions.ROOT); + NO_PERMISSION.applyInheritance(); + Permission.allPermissions.every(b => expect(NO_PERMISSION.get(b)).toBe(true)); + }); + + //todo: what other permissions tests should be added? 
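+ + //a possible extra test (a sketch): assumes setAll(bits, false) unsets the given bits, as the applyInheritance() test above suggests + test('setAll(bits, false) clears only the given permissions', () => { + const p = new Permission(); + p.setAll([Permissions.CREATE_BOARD, Permissions.CREATE_ACCOUNT]); + p.setAll([Permissions.CREATE_BOARD], false); + expect(p.get(Permissions.CREATE_BOARD)).toBe(false); + expect(p.get(Permissions.CREATE_ACCOUNT)).toBe(true); + });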
+ +}); diff --git a/helpers/posting/diceroll.test.js b/helpers/posting/diceroll.test.js new file mode 100644 index 00000000..23a3c256 --- /dev/null +++ b/helpers/posting/diceroll.test.js @@ -0,0 +1,33 @@ +const diceroll = require('./diceroll.js'); + +describe('diceroll markdown', () => { + + const prepareCases = [ + { in: '##3d6', out: '##3d6=' }, + { in: '##99d99', out: '##99d99=' }, + { in: '##999d999', out: '##999d999' }, + { in: '##3d8+5', out: '##3d8+5=' }, + { in: '##3d8-5', out: '##3d8-5=' }, + { in: '##0d0', out: '##0d0' }, + ]; + for(let i in prepareCases) { + test(`should contain ${prepareCases[i].out} for an input of ${prepareCases[i].in}`, () => { + const output = prepareCases[i].in.replace(diceroll.regexPrepare, diceroll.prepare.bind(null, false)); + expect(output).toContain(prepareCases[i].out); + }); + } + + const markdownCases = [ + { in: '##3d6=10', out: 'Rolled 3 dice with 6 sides =' }, + { in: '##99d99=5138', out: 'Rolled 99 dice with 99 sides =' }, + { in: '##999d999=10000', out: '##999d999=10000' }, + { in: '##0d0', out: '##0d0' }, + ]; + for(let i in markdownCases) { + test(`should contain ${markdownCases[i].out} for an input of ${markdownCases[i].in}`, () => { + const output = markdownCases[i].in.replace(diceroll.regexMarkdown, diceroll.markdown.bind(null, false)); + expect(output).toContain(markdownCases[i].out); + }); + } + +}); diff --git a/helpers/posting/escape.test.js b/helpers/posting/escape.test.js new file mode 100644 index 00000000..5e050520 --- /dev/null +++ b/helpers/posting/escape.test.js @@ -0,0 +1,19 @@ +const escape = require('./escape.js'); + +describe('escape() - convert some characters to html entities', () => { + const cases = [ + { in: "'", out: ''' }, + { in: '/', out: '/' }, + { in: '`', out: '`' }, + { in: '=', out: '=' }, + { in: '&', out: '&' }, + { in: '<', out: '<' }, + { in: '>', out: '>' }, + { in: '"', out: '"' }, + ]; + for(let i in cases) { + test(`should output ${cases[i].out} for an input of ${cases[i].in}`, () => { + expect(escape(cases[i].in)).toBe(cases[i].out); + }); + } +}); diff --git a/helpers/posting/fortune.js b/helpers/posting/fortune.js index 98f6b783..930f65fe 100644 --- a/helpers/posting/fortune.js +++ b/helpers/posting/fortune.js @@ -4,6 +4,8 @@ const fortunes = ['example1', 'example2', 'example3']; module.exports = { + fortunes, + regex: /##fortune/gmi, markdown: () => { diff --git a/helpers/posting/fortune.test.js b/helpers/posting/fortune.test.js new file mode 100644 index 00000000..ffc93351 --- /dev/null +++ b/helpers/posting/fortune.test.js @@ -0,0 +1,9 @@ +const fortune = require('./fortune.js'); + +describe('fortune markdown', () => { + test(`should contain a random fortune for an input of ##fortune`, () => { + const output = '##fortune'.replace(fortune.regex, fortune.markdown.bind(null, false)); + const hasFortuneText = fortune.fortunes.some(f => output.includes(f)); + expect(hasFortuneText).toBe(true); + }); +}); diff --git a/helpers/posting/linkmatch.test.js b/helpers/posting/linkmatch.test.js new file mode 100644 index 00000000..334f515a --- /dev/null +++ b/helpers/posting/linkmatch.test.js @@ -0,0 +1,42 @@ +const linkmatch = require('./linkmatch.js'); +const linkRegex = /\[(?