mirror of https://gitgud.io/fatchan/jschan.git
make schedules able to run separately, and don't create the instance on import — do it in the tasks folder index
parent
7fa5708010
commit
ddd949e743
14 changed files with 286 additions and 235 deletions
@ -0,0 +1,41 @@ |
|||||||
|
'use strict'; |
||||||
|
|
||||||
|
const getConfig = require(__dirname+'/../getconfig.js'); |
||||||
|
|
||||||
|
module.exports = class Schedule { |
||||||
|
|
||||||
|
constructor (func, interval, immediate, condition) { |
||||||
|
this.func = func; |
||||||
|
this.interval = interval; |
||||||
|
this.immediate = immediate; |
||||||
|
this.condition = condition; |
||||||
|
this.intervalId = null; |
||||||
|
this.update(); |
||||||
|
} |
||||||
|
|
||||||
|
//start the schedule
|
||||||
|
start () { |
||||||
|
if (!this.intervalId) { |
||||||
|
if (this.immediate) { |
||||||
|
this.func(); |
||||||
|
} |
||||||
|
this.intervalId = setInterval(this.func, this.interval); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
//stop the schedule
|
||||||
|
stop () { |
||||||
|
clearInterval(this.interval); |
||||||
|
this.intervalId = null; |
||||||
|
} |
||||||
|
|
||||||
|
//check config and either start or stop
|
||||||
|
update () { |
||||||
|
if (!this.condition || getConfig()[this.condition]) { |
||||||
|
this.start(); |
||||||
|
} else { |
||||||
|
this.stop(); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
} |
@ -1,8 +0,0 @@ |
|||||||
'use strict';

const deleteOld = require(__dirname+'/../helpers/files/deleteold.js');
const timeUtils = require(__dirname+'/../helpers/timeutils.js');

//remove captcha images older than five minutes
module.exports = () => deleteOld('captcha', Date.now() - (timeUtils.MINUTE * 5));
|
@ -1,53 +0,0 @@ |
|||||||
'use strict'; |
|
||||||
|
|
||||||
/* |
|
||||||
prune IPs from old posts (actually, rehash them with a temporary random salt to maintain |
|
||||||
post history and prevent *-by-ip action unintentionally deleting many posts) |
|
||||||
NOTE: ips may still remain in the following collections: |
|
||||||
- bans, because bans need the IP to function |
|
||||||
- modlog actioner ips, modlogs are already auto-pruned |
|
||||||
- ratelimits, these only last 1 minute |
|
||||||
- stats, these last max of 24 hours |
|
||||||
*/ |
|
||||||
const Mongo = require(__dirname+'/../db/db.js') |
|
||||||
, { Posts } = require(__dirname+'/../db/') |
|
||||||
, { createHash, randomBytes } = require('crypto') |
|
||||||
, { pruneIps } = require(__dirname+'/../configs/main.js'); |
|
||||||
|
|
||||||
module.exports = async (days) => { |
|
||||||
const beforeDate = new Date(); |
|
||||||
beforeDate.setDate(beforeDate.getDate() - days); |
|
||||||
const beforeDateMongoId = Mongo.ObjectId.createFromTime(Math.floor(beforeDate.getTime()/1000)); |
|
||||||
const tempIpHashSecret = randomBytes(20).toString('base64'); |
|
||||||
const bulkWrites = []; |
|
||||||
await Posts.db.find({ |
|
||||||
_id: { |
|
||||||
$lte: beforeDateMongoId, |
|
||||||
}, |
|
||||||
'ip.pruned': { |
|
||||||
$ne: true |
|
||||||
} |
|
||||||
}).forEach(post => { |
|
||||||
const randomIP = createHash('sha256').update(tempIpHashSecret + post.ip.single).digest('base64'); |
|
||||||
bulkWrites.push({ |
|
||||||
updateOne: { |
|
||||||
filter: { |
|
||||||
_id: post._id, |
|
||||||
}, |
|
||||||
update: { |
|
||||||
$set: { |
|
||||||
'ip.pruned': true, |
|
||||||
'ip.raw': randomIP, |
|
||||||
'ip.single': randomIP, |
|
||||||
'ip.qrange': randomIP, |
|
||||||
'ip.hrange': randomIP, |
|
||||||
} |
|
||||||
} |
|
||||||
} |
|
||||||
}); |
|
||||||
}); |
|
||||||
console.log(`Randomising ip on ${bulkWrites.length} posts`); |
|
||||||
if (bulkWrites.length.length > 0) { |
|
||||||
await Posts.db.bulkWrite(bulkWrites); |
|
||||||
} |
|
||||||
} |
|
@ -1,35 +0,0 @@ |
|||||||
'use strict'; |
|
||||||
|
|
||||||
const Files = require(__dirname+'/../db/files.js') |
|
||||||
, { debugLogs } = require(__dirname+'/../configs/main.js') |
|
||||||
, { remove } = require('fs-extra') |
|
||||||
, uploadDirectory = require(__dirname+'/../helpers/files/uploadDirectory.js'); |
|
||||||
|
|
||||||
module.exports = async(fileNames) => { |
|
||||||
const query = { |
|
||||||
'count': { |
|
||||||
'$lte': 0 |
|
||||||
} |
|
||||||
} |
|
||||||
if (fileNames) { |
|
||||||
query['_id'] = { |
|
||||||
'$in': fileNames |
|
||||||
}; |
|
||||||
} |
|
||||||
const unreferenced = await Files.db.find(query, { |
|
||||||
'projection': { |
|
||||||
'count': 0, |
|
||||||
'size': 0 |
|
||||||
} |
|
||||||
}).toArray(); |
|
||||||
await Files.db.removeMany(query); |
|
||||||
await Promise.all(unreferenced.map(async file => { |
|
||||||
debugLogs && console.log('Pruning', file._id); |
|
||||||
return Promise.all( |
|
||||||
[remove(`${uploadDirectory}/file/${file._id}`)] |
|
||||||
.concat(file.exts ? file.exts.filter(ext => ext).map(ext => { |
|
||||||
remove(`${uploadDirectory}/file/thumb/${file._id.split('.')[0]}${ext}`) |
|
||||||
}) : []) |
|
||||||
) |
|
||||||
})); |
|
||||||
} |
|
@ -0,0 +1,15 @@ |
|||||||
|
'use strict'; |
||||||
|
|
||||||
|
const deleteOld = require(__dirname+'/../../helpers/files/deleteold.js') |
||||||
|
, timeUtils = require(__dirname+'/../../helpers/timeutils.js'); |
||||||
|
|
||||||
|
module.exports = { |
||||||
|
|
||||||
|
func: async () => { |
||||||
|
return deleteOld('captcha', Date.now()-(timeUtils.MINUTE*5)) |
||||||
|
}, |
||||||
|
interval: timeUtils.MINUTE*5, |
||||||
|
immediate: true, |
||||||
|
condition: null |
||||||
|
|
||||||
|
}; |
@ -0,0 +1,11 @@ |
|||||||
|
'use strict';

const fs = require('fs-extra')
	, Schedule = require(__dirname+'/../Schedule.js');

//load every task module in this folder and wrap each in a Schedule,
//exported keyed by its filename without the extension
fs.readdirSync(__dirname).forEach(file => {
	//FIX: also skip anything that isn't a .js module (editor swap files,
	//subdirectories, etc.) instead of blindly require()ing every entry
	if (file === 'index.js' || !file.endsWith('.js')) { return; }
	const name = file.substring(0, file.length-3); //strip the ".js" extension
	const { func, interval, immediate, condition } = require(__dirname+'/'+file);
	module.exports[name] = new Schedule(func, interval, immediate, condition);
});
@ -0,0 +1,52 @@ |
|||||||
|
'use strict'; |
||||||
|
|
||||||
|
const Mongo = require(__dirname+'/../../db/db.js') |
||||||
|
, { Posts } = require(__dirname+'/../../db/') |
||||||
|
, { createHash, randomBytes } = require('crypto') |
||||||
|
, { pruneIps } = require(__dirname+'/../../configs/main.js') |
||||||
|
, timeUtils = require(__dirname+'/../../helpers/timeutils.js'); |
||||||
|
|
||||||
|
module.exports = { |
||||||
|
|
||||||
|
func: async (days) => { |
||||||
|
const beforeDate = new Date(); |
||||||
|
beforeDate.setDate(beforeDate.getDate() - days); |
||||||
|
const beforeDateMongoId = Mongo.ObjectId.createFromTime(Math.floor(beforeDate.getTime()/1000)); |
||||||
|
const tempIpHashSecret = randomBytes(20).toString('base64'); |
||||||
|
const bulkWrites = []; |
||||||
|
await Posts.db.find({ |
||||||
|
_id: { |
||||||
|
$lte: beforeDateMongoId, |
||||||
|
}, |
||||||
|
'ip.pruned': { |
||||||
|
$ne: true |
||||||
|
} |
||||||
|
}).forEach(post => { |
||||||
|
const randomIP = createHash('sha256').update(tempIpHashSecret + post.ip.single).digest('base64'); |
||||||
|
bulkWrites.push({ |
||||||
|
updateOne: { |
||||||
|
filter: { |
||||||
|
_id: post._id, |
||||||
|
}, |
||||||
|
update: { |
||||||
|
$set: { |
||||||
|
'ip.pruned': true, |
||||||
|
'ip.raw': randomIP, |
||||||
|
'ip.single': randomIP, |
||||||
|
'ip.qrange': randomIP, |
||||||
|
'ip.hrange': randomIP, |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
}); |
||||||
|
}); |
||||||
|
console.log(`Randomising ip on ${bulkWrites.length} posts`); |
||||||
|
if (bulkWrites.length.length > 0) { |
||||||
|
await Posts.db.bulkWrite(bulkWrites); |
||||||
|
} |
||||||
|
}, |
||||||
|
interval: timeUtils.DAY, |
||||||
|
immediate: true, |
||||||
|
condition: 'pruneIps', |
||||||
|
|
||||||
|
} |
@ -0,0 +1,44 @@ |
|||||||
|
'use strict'; |
||||||
|
|
||||||
|
const Files = require(__dirname+'/../../db/files.js') |
||||||
|
, { debugLogs } = require(__dirname+'/../../configs/main.js') |
||||||
|
, { remove } = require('fs-extra') |
||||||
|
, uploadDirectory = require(__dirname+'/../../helpers/files/uploadDirectory.js') |
||||||
|
, timeUtils = require(__dirname+'/../../helpers/timeutils.js'); |
||||||
|
|
||||||
|
module.exports = { |
||||||
|
|
||||||
|
func: async (fileNames) => { |
||||||
|
const query = { |
||||||
|
'count': { |
||||||
|
'$lte': 0 |
||||||
|
} |
||||||
|
} |
||||||
|
if (fileNames) { |
||||||
|
query['_id'] = { |
||||||
|
'$in': fileNames |
||||||
|
}; |
||||||
|
} |
||||||
|
const unreferenced = await Files.db.find(query, { |
||||||
|
'projection': { |
||||||
|
'count': 0, |
||||||
|
'size': 0 |
||||||
|
} |
||||||
|
}).toArray(); |
||||||
|
await Files.db.removeMany(query); |
||||||
|
await Promise.all(unreferenced.map(async file => { |
||||||
|
debugLogs && console.log('Pruning', file._id); |
||||||
|
return Promise.all( |
||||||
|
[remove(`${uploadDirectory}/file/${file._id}`)] |
||||||
|
.concat(file.exts ? file.exts.filter(ext => ext).map(ext => { |
||||||
|
remove(`${uploadDirectory}/file/thumb/${file._id.split('.')[0]}${ext}`) |
||||||
|
}) : []) |
||||||
|
) |
||||||
|
})); |
||||||
|
}, |
||||||
|
interval: timeUtils.DAY, |
||||||
|
immediate: true, |
||||||
|
condition: 'pruneImmediately' |
||||||
|
|
||||||
|
}; |
||||||
|
|
@ -0,0 +1,114 @@ |
|||||||
|
'use strict';

//task: crawl the webring (this site's `following` list plus sites they follow),
//collect all reachable boards into the local `webring` collection, and write
//this site's own /webring.json for others to consume
const fetch = require('node-fetch')
	, { debugLogs, meta } = require(__dirname+'/../../configs/main.js')
	, { logo, following, blacklist, proxy } = require(__dirname+'/../../configs/webring.json')
	, Mongo = require(__dirname+'/../../db/db.js')
	, { Boards, Webring } = require(__dirname+'/../../db/')
	, { outputFile } = require('fs-extra')
	, uploadDirectory = require(__dirname+'/../../helpers/files/uploadDirectory.js')
	, timeDiffString = require(__dirname+'/../../helpers/timediffstring.js')
	//optionally route all fetches through a socks proxy when configured
	, SocksProxyAgent = proxy.enabled && require('socks-proxy-agent')
	, agent = SocksProxyAgent ? new SocksProxyAgent(require('url').parse(proxy.address)) : null
	, timeUtils = require(__dirname+'/../../helpers/timeutils.js');

module.exports = {

	func: async () => {
		const label = `updating webring`;
		const start = process.hrtime(); //for debug timing log at the end

		//visited counts fetch attempts per url; known grows as sites report who they follow
		const visited = new Map();
		let known = new Set(following);
		let webringBoards = []; //list of webring boards
		//keep crawling until every known site has been visited
		while (known.size > visited.size) {
			//get sites we havent visited yet
			const toVisit = [...known].filter(url => !visited.has(url));
			//fetch all unvisited endpoints in parallel; failures resolve to undefined
			let rings = await Promise.all(toVisit.map(url => {
				visited.set(url, (visited.get(url)||0)+1);
				return fetch(url, {
					agent,
					headers: {
						'User-Agent':''
					}
				})
				.then(res => res.json())
				.catch(e => {}); //swallow network/parse errors; the ring is skipped below
			}));
			for (let i = 0; i < rings.length; i++) {
				const ring = rings[i];
				if (!ring || !ring.name || !ring.endpoint || !ring.url //malformed
					|| ring.endpoint.includes(meta.url) //own site
					|| visited.get(ring.endpoint) > 1) { //already seen endpoint (for multiple domain sites)
					continue;
				}
				visited.set(ring.endpoint, visited.get(ring.endpoint)+1);
				if (ring.following && ring.following.length > 0) {
					//filter their folowing by blacklist/self and add to known sites
					ring.following
						.filter(url => !blacklist.some(x => url.includes(x)) && !url.includes(meta.url))
						.forEach(url => known.add(url));
				}
				if (ring.boards && ring.boards.length > 0) {
					//add some stuff for the boardlist and then add their boards
					ring.boards.forEach(board => {
						board.siteName = ring.name;
						//convert to numbers because old infinity webring plugin returns string
						board.totalPosts = parseInt(board.totalPosts);
						board.postsPerHour = parseInt(board.postsPerHour);
						board.uniqueUsers = parseInt(board.uniqueUsers);
					});
					webringBoards = webringBoards.concat(ring.boards);
				}
			}
		}

		if (webringBoards.length > 0) {
			//$out from temp collection to replace webring boards
			//(atomic-ish swap: stage into tempwebring, $out over webring, then drop)
			const tempCollection = Mongo.db.collection('tempwebring');
			await tempCollection.insertMany(webringBoards);
			await tempCollection.aggregate([
				{ $out : 'webring' }
			]).toArray();
			await tempCollection.drop();
		} else {
			//otherwise none found, so delete them all
			await Webring.deleteAll();
		}

		//update webring.json
		const boards = await Boards.webringBoards();
		const json = {
			name: meta.siteName,
			url: meta.url,
			endpoint: `${meta.url}/webring.json`,
			logo,
			following,
			blacklist,
			known: [...known],
			boards: boards.map(b => {
				//map local boards to webring format
				return {
					uri: b._id,
					title: b.settings.name,
					subtitle: b.settings.description,
					path: `${meta.url}/${b._id}/`,
					postsPerHour: b.pph,
					totalPosts: b.sequence_value-1, //sequence holds the NEXT post number
					uniqueUsers: b.ips,
					nsfw: !b.settings.sfw,
					tags: b.settings.tags,
					lastPostTimestamp: b.lastPostTimestamp,
				};
			}),
		}
		await outputFile(`${uploadDirectory}/json/webring.json`, JSON.stringify(json));

		const end = process.hrtime(start);
		debugLogs && console.log(timeDiffString(label, end));
	},
	interval: timeUtils.MINUTE*15,
	immediate: true,
	condition: 'enableWebring' //only runs when the webring is enabled in config

};
@ -1,106 +0,0 @@ |
|||||||
'use strict';

//crawl the webring (this site's `following` list plus sites they follow),
//collect all reachable boards into the local `webring` collection, and write
//this site's own /webring.json for others to consume
const fetch = require('node-fetch')
	, { debugLogs, meta } = require(__dirname+'/../configs/main.js')
	, { logo, following, blacklist, proxy } = require(__dirname+'/../configs/webring.json')
	, Mongo = require(__dirname+'/../db/db.js')
	, { Boards, Webring } = require(__dirname+'/../db/')
	, { outputFile } = require('fs-extra')
	, uploadDirectory = require(__dirname+'/../helpers/files/uploadDirectory.js')
	, timeDiffString = require(__dirname+'/../helpers/timediffstring.js')
	//optionally route all fetches through a socks proxy when configured
	, SocksProxyAgent = proxy.enabled && require('socks-proxy-agent')
	, agent = SocksProxyAgent ? new SocksProxyAgent(require('url').parse(proxy.address)) : null

module.exports = async () => {
	const label = `updating webring`;
	const start = process.hrtime(); //for debug timing log at the end

	//visited counts fetch attempts per url; known grows as sites report who they follow
	const visited = new Map();
	let known = new Set(following);
	let webringBoards = []; //list of webring boards
	//keep crawling until every known site has been visited
	while (known.size > visited.size) {
		//get sites we havent visited yet
		const toVisit = [...known].filter(url => !visited.has(url));
		//fetch all unvisited endpoints in parallel; failures resolve to undefined
		let rings = await Promise.all(toVisit.map(url => {
			visited.set(url, (visited.get(url)||0)+1);
			return fetch(url, {
				agent,
				headers: {
					'User-Agent':''
				}
			})
			.then(res => res.json())
			.catch(e => {}); //swallow network/parse errors; the ring is skipped below
		}));
		for (let i = 0; i < rings.length; i++) {
			const ring = rings[i];
			if (!ring || !ring.name || !ring.endpoint || !ring.url //malformed
				|| ring.endpoint.includes(meta.url) //own site
				|| visited.get(ring.endpoint) > 1) { //already seen endpoint (for multiple domain sites)
				continue;
			}
			visited.set(ring.endpoint, visited.get(ring.endpoint)+1);
			if (ring.following && ring.following.length > 0) {
				//filter their folowing by blacklist/self and add to known sites
				ring.following
					.filter(url => !blacklist.some(x => url.includes(x)) && !url.includes(meta.url))
					.forEach(url => known.add(url));
			}
			if (ring.boards && ring.boards.length > 0) {
				//add some stuff for the boardlist and then add their boards
				ring.boards.forEach(board => {
					board.siteName = ring.name;
					//convert to numbers because old infinity webring plugin returns string
					board.totalPosts = parseInt(board.totalPosts);
					board.postsPerHour = parseInt(board.postsPerHour);
					board.uniqueUsers = parseInt(board.uniqueUsers);
				});
				webringBoards = webringBoards.concat(ring.boards);
			}
		}
	}

	if (webringBoards.length > 0) {
		//$out from temp collection to replace webring boards
		//(atomic-ish swap: stage into tempwebring, $out over webring, then drop)
		const tempCollection = Mongo.db.collection('tempwebring');
		await tempCollection.insertMany(webringBoards);
		await tempCollection.aggregate([
			{ $out : 'webring' }
		]).toArray();
		await tempCollection.drop();
	} else {
		//otherwise none found, so delete them all
		await Webring.deleteAll();
	}

	//update webring.json
	const boards = await Boards.webringBoards();
	const json = {
		name: meta.siteName,
		url: meta.url,
		endpoint: `${meta.url}/webring.json`,
		logo,
		following,
		blacklist,
		known: [...known],
		boards: boards.map(b => {
			//map local boards to webring format
			return {
				uri: b._id,
				title: b.settings.name,
				subtitle: b.settings.description,
				path: `${meta.url}/${b._id}/`,
				postsPerHour: b.pph,
				totalPosts: b.sequence_value-1, //sequence holds the NEXT post number
				uniqueUsers: b.ips,
				nsfw: !b.settings.sfw,
				tags: b.settings.tags,
				lastPostTimestamp: b.lastPostTimestamp,
			};
		}),
	}
	await outputFile(`${uploadDirectory}/json/webring.json`, JSON.stringify(json));

	const end = process.hrtime(start);
	debugLogs && console.log(timeDiffString(label, end));
}
|
Loading…
Reference in new issue