We'll use it (Redis) as a cache in the future.
/!\ You'll lose your old jobs (pending jobs included), so upgrade only when
you don't have any pending jobs anymore.
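
To check whether the old scheduler still has unfinished work before you upgrade, here is a rough sketch against the pre-upgrade `job` table (the `peertube_prod` database name is an assumption, adjust it to your setup):

```bash
# Count jobs the old scheduler has not finished yet ('pending' and 'processing'
# are the old job states). "peertube_prod" is an assumed database name.
sudo -u postgres psql peertube_prod -c \
  "SELECT COUNT(*) FROM \"job\" WHERE \"state\" IN ('pending', 'processing');"
```

A count of 0 means it is safe to upgrade.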
* nginx
* PostgreSQL
+ * Redis
* **NodeJS >= 8.x**
* yarn
* OpenSSL (cli)
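
Redis is now a required dependency: PeerTube uses it as the backend for the new job queue. A minimal sanity check that it is installed and reachable, assuming a Debian-like system and the default `localhost:6379` used in the sample configuration:

```bash
# Install the Redis server (Debian/Ubuntu package name) and check that it answers.
sudo apt-get install redis-server
redis-cli -h localhost -p 6379 ping   # expected reply: PONG
```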
<div class="admin-sub-header">
<div class="admin-sub-title">Jobs list</div>
+
+ <div class="peertube-select-container">
+ <select [(ngModel)]="jobState" (ngModelChange)="onJobStateChanged()">
+ <option *ngFor="let state of jobStates" [value]="state">{{ state }}</option>
+ </select>
+ </div>
</div>
+
+
<p-dataTable
[value]="jobs" [lazy]="true" [paginator]="true" [totalRecords]="totalRecords" [rows]="rowsPerPage"
sortField="createdAt" (onLazyLoad)="loadLazy($event)" [scrollable]="true" [virtualScroll]="true" [scrollHeight]="scrollHeight"
>
<p-column field="id" header="ID" [style]="{ width: '60px' }"></p-column>
- <p-column field="category" header="Category" [style]="{ width: '130px' }"></p-column>
- <p-column field="handlerName" header="Handler name" [style]="{ width: '210px' }"></p-column>
- <p-column header="Input data">
+ <p-column field="type" header="Type" [style]="{ width: '210px' }"></p-column>
+ <p-column field="state" header="State" [style]="{ width: '130px' }"></p-column>
+ <p-column header="Payload">
<ng-template pTemplate="body" let-job="rowData">
- <pre>{{ job.handlerInputData }}</pre>
+ <pre>{{ job.data }}</pre>
</ng-template>
</p-column>
- <p-column field="state" header="State" [style]="{ width: '100px' }"></p-column>
<p-column field="createdAt" header="Created date" [sortable]="true" [style]="{ width: '250px' }"></p-column>
<p-column field="updatedAt" header="Updated date" [style]="{ width: '250px' }"></p-column>
</p-dataTable>
+@import '_variables';
+@import '_mixins';
+
+.peertube-select-container {
+ @include peertube-select-container(auto);
+}
+
pre {
font-size: 11px;
}
import { NotificationsService } from 'angular2-notifications'
import { SortMeta } from 'primeng/primeng'
import { Job } from '../../../../../../shared/index'
+import { JobState } from '../../../../../../shared/models'
import { RestPagination, RestTable } from '../../../shared'
import { viewportHeight } from '../../../shared/misc/utils'
import { JobService } from '../shared'
styleUrls: [ './jobs-list.component.scss' ]
})
export class JobsListComponent extends RestTable implements OnInit {
+ jobState: JobState = 'inactive'
+ jobStates: JobState[] = [ 'active', 'complete', 'failed', 'inactive', 'delayed' ]
jobs: Job[] = []
totalRecords = 0
rowsPerPage = 20
- sort: SortMeta = { field: 'createdAt', order: 1 }
+ sort: SortMeta = { field: 'createdAt', order: -1 }
pagination: RestPagination = { count: this.rowsPerPage, start: 0 }
scrollHeight = ''
this.scrollHeight = (viewportHeight() - 380) + 'px'
}
+ onJobStateChanged () {
+ this.loadData()
+ }
+
protected loadData () {
this.jobsService
- .getJobs(this.pagination, this.sort)
+ .getJobs(this.jobState, this.pagination, this.sort)
.subscribe(
resultList => {
this.jobs = resultList.data
import 'rxjs/add/operator/map'
import { Observable } from 'rxjs/Observable'
import { ResultList } from '../../../../../../shared'
+import { JobState } from '../../../../../../shared/models'
import { Job } from '../../../../../../shared/models/job.model'
import { environment } from '../../../../environments/environment'
import { RestExtractor, RestPagination, RestService } from '../../../shared'
private restExtractor: RestExtractor
) {}
- getJobs (pagination: RestPagination, sort: SortMeta): Observable<ResultList<Job>> {
+ getJobs (state: JobState, pagination: RestPagination, sort: SortMeta): Observable<ResultList<Job>> {
let params = new HttpParams()
params = this.restService.addRestGetParams(params, pagination, sort)
- return this.authHttp.get<ResultList<Job>>(JobService.BASE_JOB_URL, { params })
+ return this.authHttp.get<ResultList<Job>>(JobService.BASE_JOB_URL + '/' + state, { params })
.map(res => this.restExtractor.convertResultListDateToHuman(res))
.map(res => this.restExtractor.applyToResultListData(res, this.prettyPrintData))
.catch(err => this.restExtractor.handleError(err))
}
private prettyPrintData (obj: Job) {
- const handlerInputData = JSON.stringify(obj.handlerInputData, null, 2)
+ const data = JSON.stringify(obj.data, null, 2)
- return Object.assign(obj, { handlerInputData })
+ return Object.assign(obj, { data })
}
}
username: 'peertube'
password: 'peertube'
+redis:
+ hostname: 'localhost'
+ port: 6379
+ auth: null
+
# From the project root directory
storage:
avatars: 'storage/avatars/'
username: 'peertube'
password: 'peertube'
+redis:
+ hostname: 'localhost'
+ port: 6379
+ auth: null
+
# From the project root directory
storage:
avatars: '/var/www/peertube/storage/avatars/'
"js-yaml": "^3.5.4",
"jsonld": "^0.5.12",
"jsonld-signatures": "https://github.com/Chocobozzz/jsonld-signatures#rsa2017",
+ "kue": "^0.11.6",
"lodash": "^4.11.1",
"magnet-uri": "^5.1.4",
"mkdirp": "^0.5.1",
"@types/chai": "^4.0.4",
"@types/config": "^0.0.33",
"@types/express": "^4.0.35",
+ "@types/kue": "^0.11.8",
"@types/lodash": "^4.14.64",
"@types/magnet-uri": "^5.1.1",
"@types/mkdirp": "^0.5.1",
rm -f "./config/local-test.json"
rm -f "./config/local-test-$i.json"
createdb "peertube_test$i"
+ redis-cli KEYS "q-localhost:900$i*" | grep -v empty | xargs --no-run-if-empty redis-cli DEL
done
import { join } from 'path'
import { createInterface } from 'readline'
import * as winston from 'winston'
-import { labelFormatter, loggerFormat, timestampFormatter } from '../server/helpers/logger'
+import { labelFormatter } from '../server/helpers/logger'
import { CONFIG } from '../server/initializers/constants'
+const excludedKeys = {
+ level: true,
+ message: true,
+ splat: true,
+ timestamp: true,
+ label: true
+}
+function keysExcluder (key, value) {
+ return excludedKeys[key] === true ? undefined : value
+}
+
+const loggerFormat = winston.format.printf((info) => {
+ let additionalInfos = JSON.stringify(info, keysExcluder, 2)
+ if (additionalInfos === '{}') additionalInfos = ''
+ else additionalInfos = ' ' + additionalInfos
+
+ return `[${info.label}] ${new Date(info.timestamp).toISOString()} ${info.level}: ${info.message}${additionalInfos}`
+})
+
const logger = new winston.createLogger({
transports: [
new winston.transports.Console({
level: 'debug',
stderrLevels: [],
format: winston.format.combine(
- timestampFormatter,
winston.format.splat(),
labelFormatter,
winston.format.colorize(),
// ----------- PeerTube modules -----------
import { installApplication } from './server/initializers'
-import { activitypubHttpJobScheduler, transcodingJobScheduler } from './server/lib/jobs'
+import { JobQueue } from './server/lib/job-queue'
import { VideosPreviewCache } from './server/lib/cache'
import { apiRouter, clientsRouter, staticRouter, servicesRouter, webfingerRouter, activityPubRouter } from './server/controllers'
import { BadActorFollowScheduler } from './server/lib/schedulers/bad-actor-follow-scheduler'
+import { RemoveOldJobsScheduler } from './server/lib/schedulers/remove-old-jobs-scheduler'
// ----------- Command line -----------
server.listen(port, () => {
VideosPreviewCache.Instance.init(CONFIG.CACHE.PREVIEWS.SIZE)
BadActorFollowScheduler.Instance.enable()
-
- activitypubHttpJobScheduler.activate()
- transcodingJobScheduler.activate()
+ RemoveOldJobsScheduler.Instance.enable()
+ JobQueue.Instance.init()
logger.info('Server listening on port %d', port)
logger.info('Web server: %s', CONFIG.WEBSERVER.URL)
import * as express from 'express'
+import { ResultList } from '../../../shared'
+import { Job, JobType, JobState } from '../../../shared/models'
import { UserRight } from '../../../shared/models/users'
-import { getFormattedObjects } from '../../helpers/utils'
+import { JobQueue } from '../../lib/job-queue'
import {
- asyncMiddleware, authenticate, ensureUserHasRight, jobsSortValidator, setDefaultPagination,
+ asyncMiddleware,
+ authenticate,
+ ensureUserHasRight,
+ jobsSortValidator,
+ setDefaultPagination,
setDefaultSort
} from '../../middlewares'
import { paginationValidator } from '../../middlewares/validators'
-import { JobModel } from '../../models/job/job'
+import { listJobsValidator } from '../../middlewares/validators/jobs'
const jobsRouter = express.Router()
-jobsRouter.get('/',
+jobsRouter.get('/:state',
authenticate,
ensureUserHasRight(UserRight.MANAGE_JOBS),
paginationValidator,
jobsSortValidator,
setDefaultSort,
setDefaultPagination,
+ asyncMiddleware(listJobsValidator),
asyncMiddleware(listJobs)
)
// ---------------------------------------------------------------------------
async function listJobs (req: express.Request, res: express.Response, next: express.NextFunction) {
- const resultList = await JobModel.listForApi(req.query.start, req.query.count, req.query.sort)
+ const sort = req.query.sort === 'createdAt' ? 'asc' : 'desc'
+
+ const jobs = await JobQueue.Instance.listForApi(req.params.state, req.query.start, req.query.count, sort)
+ const total = await JobQueue.Instance.count(req.params.state)
+
+  const result: ResultList<Job> = {
+ total,
+ data: jobs.map(j => formatJob(j.toJSON()))
+ }
+ return res.json(result)
+}
- return res.json(getFormattedObjects(resultList.data, resultList.total))
+function formatJob (job: any): Job {
+ return {
+ id: job.id,
+ state: job.state as JobState,
+ type: job.type as JobType,
+ data: job.data,
+ error: job.error,
+ createdAt: new Date(parseInt(job.created_at, 10)),
+ updatedAt: new Date(parseInt(job.updated_at, 10))
+ }
}
actorFollow.ActorFollower = fromActor
// Send a notification to remote server
- await sendFollow(actorFollow, t)
+ await sendFollow(actorFollow)
})
}
} from '../../../initializers'
import { fetchRemoteVideoDescription, getVideoActivityPubUrl, shareVideoByServerAndChannel } from '../../../lib/activitypub'
import { sendCreateVideo, sendCreateViewToOrigin, sendCreateViewToVideoFollowers, sendUpdateVideo } from '../../../lib/activitypub/send'
-import { transcodingJobScheduler } from '../../../lib/jobs/transcoding-job-scheduler'
+import { JobQueue } from '../../../lib/job-queue'
import {
asyncMiddleware, authenticate, paginationValidator, setDefaultSort, setDefaultPagination, videosAddValidator, videosGetValidator,
videosRemoveValidator, videosSearchValidator, videosSortValidator, videosUpdateValidator
)
await Promise.all(tasks)
- return sequelizeTypescript.transaction(async t => {
+ const videoCreated = await sequelizeTypescript.transaction(async t => {
const sequelizeOptions = { transaction: t }
- if (CONFIG.TRANSCODING.ENABLED === true) {
- // Put uuid because we don't have id auto incremented for now
- const dataInput = {
- videoUUID: video.uuid
- }
-
- await transcodingJobScheduler.createJob(t, 'videoFileOptimizer', dataInput)
- }
-
const videoCreated = await video.save(sequelizeOptions)
// Do not forget to add video channel information to the created video
videoCreated.VideoChannel = res.locals.videoChannel
return videoCreated
})
+
+ if (CONFIG.TRANSCODING.ENABLED === true) {
+ // Put uuid because we don't have id auto incremented for now
+ const dataInput = {
+ videoUUID: videoCreated.uuid
+ }
+
+ await JobQueue.Instance.createJob({ type: 'video-file', payload: dataInput })
+ }
+
+ return videoCreated
}
async function updateVideoRetryWrapper (req: express.Request, res: express.Response, next: express.NextFunction) {
--- /dev/null
+import { JobState } from '../../../shared/models'
+import { exists } from './misc'
+
+const jobStates: JobState[] = [ 'active', 'complete', 'failed', 'inactive', 'delayed' ]
+
+function isValidJobState (value: JobState) {
+ return exists(value) && jobStates.indexOf(value) !== -1
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ isValidJobState
+}
.catch(err => callback(err))
})
.catch(err => {
+ console.error(err)
logger.error(options.errorMessage, err)
throw err
})
import { IConfig } from 'config'
import { dirname, join } from 'path'
-import { JobCategory, JobState, VideoRateType } from '../../shared/models'
+import { JobType, VideoRateType } from '../../shared/models'
import { ActivityPubActorType } from '../../shared/models/activitypub'
import { FollowState } from '../../shared/models/actors'
import { VideoPrivacy } from '../../shared/models/videos'
// ---------------------------------------------------------------------------
-const LAST_MIGRATION_VERSION = 175
+const LAST_MIGRATION_VERSION = 180
// ---------------------------------------------------------------------------
const SORTABLE_COLUMNS = {
USERS: [ 'id', 'username', 'createdAt' ],
ACCOUNTS: [ 'createdAt' ],
- JOBS: [ 'id', 'createdAt' ],
+ JOBS: [ 'createdAt' ],
VIDEO_ABUSES: [ 'id', 'createdAt' ],
VIDEO_CHANNELS: [ 'id', 'name', 'updatedAt', 'createdAt' ],
VIDEOS: [ 'name', 'duration', 'createdAt', 'views', 'likes' ],
WS: 'wss'
}
-const JOB_STATES: { [ id: string ]: JobState } = {
- PENDING: 'pending',
- PROCESSING: 'processing',
- ERROR: 'error',
- SUCCESS: 'success'
-}
-const JOB_CATEGORIES: { [ id: string ]: JobCategory } = {
- TRANSCODING: 'transcoding',
- ACTIVITYPUB_HTTP: 'activitypub-http'
+const JOB_ATTEMPTS: { [ id in JobType ]: number } = {
+ 'activitypub-http-broadcast': 5,
+ 'activitypub-http-unicast': 5,
+ 'activitypub-http-fetcher': 5,
+ 'video-file': 1
}
-// How many maximum jobs we fetch from the database per cycle
-const JOBS_FETCH_LIMIT_PER_CYCLE = {
- transcoding: 10,
- httpRequest: 20
+const JOB_CONCURRENCY: { [ id in JobType ]: number } = {
+ 'activitypub-http-broadcast': 1,
+ 'activitypub-http-unicast': 5,
+ 'activitypub-http-fetcher': 1,
+ 'video-file': 1
}
-// 1 minutes
-let JOBS_FETCHING_INTERVAL = 60000
+// 2 days
+const JOB_COMPLETED_LIFETIME = 60000 * 60 * 24 * 2
// 1 hour
let SCHEDULER_INTERVAL = 60000 * 60
USERNAME: config.get<string>('database.username'),
PASSWORD: config.get<string>('database.password')
},
+ REDIS: {
+ HOSTNAME: config.get<string>('redis.hostname'),
+    PORT: config.get<number>('redis.port'),
+ AUTH: config.get<string>('redis.auth')
+ },
STORAGE: {
AVATARS_DIR: buildPath(config.get<string>('storage.avatars')),
LOG_DIR: buildPath(config.get<string>('storage.logs')),
PUBLIC: 'https://www.w3.org/ns/activitystreams#Public',
COLLECTION_ITEMS_PER_PAGE: 10,
FETCH_PAGE_LIMIT: 100,
- MAX_HTTP_ATTEMPT: 5,
URL_MIME_TYPES: {
VIDEO: Object.keys(VIDEO_MIMETYPE_EXT),
TORRENT: [ 'application/x-bittorrent' ],
// Special constants for a test instance
if (isTestInstance() === true) {
ACTOR_FOLLOW_SCORE.BASE = 20
- JOBS_FETCHING_INTERVAL = 1000
REMOTE_SCHEME.HTTP = 'http'
REMOTE_SCHEME.WS = 'ws'
STATIC_MAX_AGE = '0'
CONFIG,
CONSTRAINTS_FIELDS,
EMBED_SIZE,
- JOB_STATES,
- JOBS_FETCH_LIMIT_PER_CYCLE,
- JOBS_FETCHING_INTERVAL,
- JOB_CATEGORIES,
+ JOB_CONCURRENCY,
+ JOB_ATTEMPTS,
LAST_MIGRATION_VERSION,
OAUTH_LIFETIME,
OPENGRAPH_AND_OEMBED_COMMENT,
VIDEO_RATE_TYPES,
VIDEO_MIMETYPE_EXT,
AVATAR_MIMETYPE_EXT,
- SCHEDULER_INTERVAL
+ SCHEDULER_INTERVAL,
+ JOB_COMPLETED_LIFETIME
}
// ---------------------------------------------------------------------------
import { ActorFollowModel } from '../models/activitypub/actor-follow'
import { ApplicationModel } from '../models/application/application'
import { AvatarModel } from '../models/avatar/avatar'
-import { JobModel } from '../models/job/job'
import { OAuthClientModel } from '../models/oauth/oauth-client'
import { OAuthTokenModel } from '../models/oauth/oauth-token'
import { ServerModel } from '../models/server/server'
ActorFollowModel,
AvatarModel,
AccountModel,
- JobModel,
OAuthClientModel,
OAuthTokenModel,
ServerModel,
-import { values } from 'lodash'
import * as Sequelize from 'sequelize'
import { createPrivateAndPublicKeys } from '../../helpers/peertube-crypto'
import { shareVideoByServerAndChannel } from '../../lib/activitypub/share'
import { getVideoActivityPubUrl, getVideoChannelActivityPubUrl } from '../../lib/activitypub/url'
import { createLocalAccountWithoutKeys } from '../../lib/user'
import { ApplicationModel } from '../../models/application/application'
-import { JOB_CATEGORIES, SERVER_ACTOR_NAME } from '../constants'
+import { SERVER_ACTOR_NAME } from '../constants'
async function up (utils: {
transaction: Sequelize.Transaction,
{
const data = {
- type: Sequelize.ENUM(values(JOB_CATEGORIES)),
+ type: Sequelize.ENUM('transcoding', 'activitypub-http'),
defaultValue: 'transcoding',
allowNull: false
}
--- /dev/null
+import * as Sequelize from 'sequelize'
+
+async function up (utils: {
+ transaction: Sequelize.Transaction,
+ queryInterface: Sequelize.QueryInterface,
+ sequelize: Sequelize.Sequelize
+}): Promise<void> {
+ await utils.queryInterface.dropTable('job')
+}
+
+function down (options) {
+ throw new Error('Not implemented.')
+}
+
+export {
+ up,
+ down
+}
actor = await retryTransactionWrapper(saveActorAndServerAndModelIfNotExist, options)
}
- return refreshActorIfNeeded(actor)
+ const options = {
+ arguments: [ actor ],
+ errorMessage: 'Cannot refresh actor if needed with many retries.'
+ }
+ return retryTransactionWrapper(refreshActorIfNeeded, options)
}
function buildActorInstance (type: ActivityPubActorType, url: string, preferredUsername: string, uuid?: string) {
async function refreshActorIfNeeded (actor: ActorModel) {
if (!actor.isOutdated()) return actor
- const actorUrl = await getUrlFromWebfinger(actor.preferredUsername, actor.getHost())
- const result = await fetchRemoteActor(actorUrl)
- if (result === undefined) {
- logger.warn('Cannot fetch remote actor in refresh actor.')
- return actor
- }
-
- return sequelizeTypescript.transaction(async t => {
- updateInstanceWithAnother(actor, result.actor)
-
- if (result.avatarName !== undefined) {
- await updateActorAvatarInstance(actor, result.avatarName, t)
+ try {
+ const actorUrl = await getUrlFromWebfinger(actor.preferredUsername, actor.getHost())
+ const result = await fetchRemoteActor(actorUrl)
+ if (result === undefined) {
+ logger.warn('Cannot fetch remote actor in refresh actor.')
+ return actor
}
- // Force update
- actor.setDataValue('updatedAt', new Date())
- await actor.save({ transaction: t })
+ return sequelizeTypescript.transaction(async t => {
+ updateInstanceWithAnother(actor, result.actor)
- if (actor.Account) {
- await actor.save({ transaction: t })
+ if (result.avatarName !== undefined) {
+ await updateActorAvatarInstance(actor, result.avatarName, t)
+ }
- actor.Account.set('name', result.name)
- await actor.Account.save({ transaction: t })
- } else if (actor.VideoChannel) {
+ // Force update
+ actor.setDataValue('updatedAt', new Date())
await actor.save({ transaction: t })
- actor.VideoChannel.set('name', result.name)
- await actor.VideoChannel.save({ transaction: t })
- }
+ if (actor.Account) {
+ await actor.save({ transaction: t })
+
+ actor.Account.set('name', result.name)
+ await actor.Account.save({ transaction: t })
+ } else if (actor.VideoChannel) {
+ await actor.save({ transaction: t })
+
+ actor.VideoChannel.set('name', result.name)
+ await actor.VideoChannel.save({ transaction: t })
+ }
+ return actor
+ })
+ } catch (err) {
+ logger.warn('Cannot refresh actor.', err)
return actor
- })
+ }
}
function normalizeActor (actor: any) {
-import { Transaction } from 'sequelize'
import { ActorModel } from '../../models/activitypub/actor'
-import { activitypubHttpJobScheduler, ActivityPubHttpPayload } from '../jobs/activitypub-http-job-scheduler'
+import { JobQueue } from '../job-queue'
-async function addFetchOutboxJob (actor: ActorModel, t: Transaction) {
- const jobPayload: ActivityPubHttpPayload = {
+async function addFetchOutboxJob (actor: ActorModel) {
+ const payload = {
uris: [ actor.outboxUrl ]
}
- return activitypubHttpJobScheduler.createJob(t, 'activitypubHttpFetcherHandler', jobPayload)
+ return JobQueue.Instance.createJob({ type: 'activitypub-http-fetcher', payload })
}
export {
if (follow.state !== 'accepted') {
follow.set('state', 'accepted')
await follow.save()
- await addFetchOutboxJob(targetActor, undefined)
+ await addFetchOutboxJob(targetActor)
}
}
actorFollow.ActorFollowing = targetActor
// Target sends to actor he accepted the follow request
- return sendAccept(actorFollow, t)
+ return sendAccept(actorFollow)
})
logger.info('Actor %s is followed by actor %s.', targetActorURL, actor.url)
import { VideoModel } from '../../../models/video/video'
import { VideoCommentModel } from '../../../models/video/video-comment'
import { VideoShareModel } from '../../../models/video/video-share'
-import { activitypubHttpJobScheduler, ActivityPubHttpPayload } from '../../jobs/activitypub-http-job-scheduler'
+import { JobQueue } from '../../job-queue'
async function forwardActivity (
activity: Activity,
logger.debug('Creating forwarding job.', { uris })
- const jobPayload: ActivityPubHttpPayload = {
+ const payload = {
uris,
body: activity
}
-
- return activitypubHttpJobScheduler.createJob(t, 'activitypubHttpBroadcastHandler', jobPayload)
+ return JobQueue.Instance.createJob({ type: 'activitypub-http-broadcast', payload })
}
async function broadcastToFollowers (
actorsException: ActorModel[] = []
) {
const uris = await computeFollowerUris(toActorFollowers, actorsException, t)
- return broadcastTo(uris, data, byActor, t)
+ return broadcastTo(uris, data, byActor)
}
async function broadcastToActors (
data: any,
byActor: ActorModel,
toActors: ActorModel[],
- t: Transaction,
actorsException: ActorModel[] = []
) {
const uris = await computeUris(toActors, actorsException)
- return broadcastTo(uris, data, byActor, t)
+ return broadcastTo(uris, data, byActor)
}
-async function broadcastTo (uris: string[], data: any, byActor: ActorModel, t: Transaction) {
+async function broadcastTo (uris: string[], data: any, byActor: ActorModel) {
if (uris.length === 0) return undefined
logger.debug('Creating broadcast job.', { uris })
- const jobPayload: ActivityPubHttpPayload = {
+ const payload = {
uris,
signatureActorId: byActor.id,
body: data
}
- return activitypubHttpJobScheduler.createJob(t, 'activitypubHttpBroadcastHandler', jobPayload)
+ return JobQueue.Instance.createJob({ type: 'activitypub-http-broadcast', payload })
}
-async function unicastTo (data: any, byActor: ActorModel, toActorUrl: string, t: Transaction) {
+async function unicastTo (data: any, byActor: ActorModel, toActorUrl: string) {
logger.debug('Creating unicast job.', { uri: toActorUrl })
- const jobPayload: ActivityPubHttpPayload = {
- uris: [ toActorUrl ],
+ const payload = {
+ uri: toActorUrl,
signatureActorId: byActor.id,
body: data
}
- return activitypubHttpJobScheduler.createJob(t, 'activitypubHttpUnicastHandler', jobPayload)
+ return JobQueue.Instance.createJob({ type: 'activitypub-http-unicast', payload })
}
function getOriginVideoAudience (video: VideoModel, actorsInvolvedInVideo: ActorModel[]) {
-import { Transaction } from 'sequelize'
import { ActivityAccept, ActivityFollow } from '../../../../shared/models/activitypub'
import { ActorModel } from '../../../models/activitypub/actor'
import { ActorFollowModel } from '../../../models/activitypub/actor-follow'
import { unicastTo } from './misc'
import { followActivityData } from './send-follow'
-async function sendAccept (actorFollow: ActorFollowModel, t: Transaction) {
+async function sendAccept (actorFollow: ActorFollowModel) {
const follower = actorFollow.ActorFollower
const me = actorFollow.ActorFollowing
const url = getActorFollowAcceptActivityPubUrl(actorFollow)
const data = acceptActivityData(url, me, followData)
- return unicastTo(data, me, follower.inboxUrl, t)
+ return unicastTo(data, me, follower.inboxUrl)
}
// ---------------------------------------------------------------------------
const audience = getOriginVideoAudience(video, actorsInvolvedInVideo)
const data = await createActivityData(url, byActor, announcedActivity, t, audience)
- return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl, t)
+ return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl)
}
async function announceActivityData (
import { VideoCommentModel } from '../../../models/video/video-comment'
import { getVideoAbuseActivityPubUrl, getVideoDislikeActivityPubUrl, getVideoViewActivityPubUrl } from '../url'
import {
- audiencify, broadcastToActors, broadcastToFollowers, getActorsInvolvedInVideo, getAudience, getObjectFollowersAudience,
- getOriginVideoAudience, getOriginVideoCommentAudience,
+ audiencify,
+ broadcastToActors,
+ broadcastToFollowers,
+ getActorsInvolvedInVideo,
+ getAudience,
+ getObjectFollowersAudience,
+ getOriginVideoAudience,
+ getOriginVideoCommentAudience,
unicastTo
} from './misc'
const audience = { to: [ video.VideoChannel.Account.Actor.url ], cc: [] }
const data = await createActivityData(url, byActor, videoAbuse.toActivityPubObject(), t, audience)
- return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl, t)
+ return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl)
}
async function sendCreateVideoCommentToOrigin (comment: VideoCommentModel, t: Transaction) {
// This was a reply, send it to the parent actors
const actorsException = [ byActor ]
- await broadcastToActors(data, byActor, threadParentComments.map(c => c.Account.Actor), t, actorsException)
+ await broadcastToActors(data, byActor, threadParentComments.map(c => c.Account.Actor), actorsException)
// Broadcast to our followers
await broadcastToFollowers(data, byActor, [ byActor ], t)
// Send to origin
- return unicastTo(data, byActor, comment.Video.VideoChannel.Account.Actor.sharedInboxUrl, t)
+ return unicastTo(data, byActor, comment.Video.VideoChannel.Account.Actor.sharedInboxUrl)
}
async function sendCreateVideoCommentToVideoFollowers (comment: VideoCommentModel, t: Transaction) {
// This was a reply, send it to the parent actors
const actorsException = [ byActor ]
- await broadcastToActors(data, byActor, threadParentComments.map(c => c.Account.Actor), t, actorsException)
+ await broadcastToActors(data, byActor, threadParentComments.map(c => c.Account.Actor), actorsException)
// Broadcast to our followers
await broadcastToFollowers(data, byActor, [ byActor ], t)
const audience = getOriginVideoAudience(video, actorsInvolvedInVideo)
const data = await createActivityData(url, byActor, viewActivityData, t, audience)
- return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl, t)
+ return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl)
}
async function sendCreateViewToVideoFollowers (byActor: ActorModel, video: VideoModel, t: Transaction) {
const audience = getOriginVideoAudience(video, actorsInvolvedInVideo)
const data = await createActivityData(url, byActor, dislikeActivityData, t, audience)
- return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl, t)
+ return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl)
}
async function sendCreateDislikeToVideoFollowers (byActor: ActorModel, video: VideoModel, t: Transaction) {
-import { Transaction } from 'sequelize'
import { ActivityFollow } from '../../../../shared/models/activitypub'
import { ActorModel } from '../../../models/activitypub/actor'
import { ActorFollowModel } from '../../../models/activitypub/actor-follow'
import { getActorFollowActivityPubUrl } from '../url'
import { unicastTo } from './misc'
-function sendFollow (actorFollow: ActorFollowModel, t: Transaction) {
+function sendFollow (actorFollow: ActorFollowModel) {
const me = actorFollow.ActorFollower
const following = actorFollow.ActorFollowing
const url = getActorFollowActivityPubUrl(actorFollow)
const data = followActivityData(url, me, following)
- return unicastTo(data, me, following.inboxUrl, t)
+ return unicastTo(data, me, following.inboxUrl)
}
function followActivityData (url: string, byActor: ActorModel, targetActor: ActorModel): ActivityFollow {
const audience = getOriginVideoAudience(video, accountsInvolvedInVideo)
const data = await likeActivityData(url, byActor, video, t, audience)
- return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl, t)
+ return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl)
}
async function sendLikeToVideoFollowers (byActor: ActorModel, video: VideoModel, t: Transaction) {
import { Transaction } from 'sequelize'
-import {
- ActivityAudience,
- ActivityCreate,
- ActivityFollow,
- ActivityLike,
- ActivityUndo
-} from '../../../../shared/models/activitypub'
+import { ActivityAudience, ActivityCreate, ActivityFollow, ActivityLike, ActivityUndo } from '../../../../shared/models/activitypub'
import { ActorModel } from '../../../models/activitypub/actor'
import { ActorFollowModel } from '../../../models/activitypub/actor-follow'
import { VideoModel } from '../../../models/video/video'
const object = followActivityData(followUrl, me, following)
const data = await undoActivityData(undoUrl, me, object, t)
- return unicastTo(data, me, following.inboxUrl, t)
+ return unicastTo(data, me, following.inboxUrl)
}
async function sendUndoLikeToOrigin (byActor: ActorModel, video: VideoModel, t: Transaction) {
const object = await likeActivityData(likeUrl, byActor, video, t)
const data = await undoActivityData(undoUrl, byActor, object, t, audience)
- return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl, t)
+ return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl)
}
async function sendUndoLikeToVideoFollowers (byActor: ActorModel, video: VideoModel, t: Transaction) {
const data = await undoActivityData(undoUrl, byActor, object, t, audience)
- return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl, t)
+ return unicastTo(data, byActor, video.VideoChannel.Account.Actor.sharedInboxUrl)
}
async function sendUndoDislikeToVideoFollowers (byActor: ActorModel, video: VideoModel, t: Transaction) {
--- /dev/null
+import * as kue from 'kue'
+import { logger } from '../../../helpers/logger'
+import { doRequest } from '../../../helpers/requests'
+import { ActorFollowModel } from '../../../models/activitypub/actor-follow'
+import { buildSignedRequestOptions, computeBody } from './utils/activitypub-http-utils'
+
+export type ActivitypubHttpBroadcastPayload = {
+ uris: string[]
+ signatureActorId?: number
+ body: any
+}
+
+async function processActivityPubHttpBroadcast (job: kue.Job) {
+ logger.info('Processing ActivityPub broadcast in job %d.', job.id)
+
+ const payload = job.data as ActivitypubHttpBroadcastPayload
+
+ const body = await computeBody(payload)
+ const httpSignatureOptions = await buildSignedRequestOptions(payload)
+
+ const options = {
+ method: 'POST',
+ uri: '',
+ json: body,
+ httpSignature: httpSignatureOptions
+ }
+
+ const badUrls: string[] = []
+ const goodUrls: string[] = []
+
+ for (const uri of payload.uris) {
+ options.uri = uri
+
+ try {
+ await doRequest(options)
+ goodUrls.push(uri)
+ } catch (err) {
+ badUrls.push(uri)
+ }
+ }
+
+ return ActorFollowModel.updateActorFollowsScoreAndRemoveBadOnes(goodUrls, badUrls, undefined)
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ processActivityPubHttpBroadcast
+}
--- /dev/null
+import * as kue from 'kue'
+import { logger } from '../../../helpers/logger'
+import { doRequest } from '../../../helpers/requests'
+import { ACTIVITY_PUB } from '../../../initializers'
+import { processActivities } from '../../activitypub/process'
+
+export type ActivitypubHttpFetcherPayload = {
+ uris: string[]
+}
+
+async function processActivityPubHttpFetcher (job: kue.Job) {
+ logger.info('Processing ActivityPub fetcher in job %d.', job.id)
+
+  const payload = job.data as ActivitypubHttpFetcherPayload
+
+ const options = {
+ method: 'GET',
+ uri: '',
+ json: true,
+ activityPub: true
+ }
+
+ for (const uri of payload.uris) {
+ options.uri = uri
+ logger.info('Fetching ActivityPub data on %s.', uri)
+
+ const response = await doRequest(options)
+ const firstBody = response.body
+
+ if (firstBody.first && Array.isArray(firstBody.first.orderedItems)) {
+ const activities = firstBody.first.orderedItems
+
+ logger.info('Processing %i items ActivityPub fetcher for %s.', activities.length, options.uri)
+
+ await processActivities(activities)
+ }
+
+ let limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
+ let i = 0
+ let nextLink = firstBody.first.next
+ while (nextLink && i < limit) {
+ options.uri = nextLink
+
+ const { body } = await doRequest(options)
+ nextLink = body.next
+ i++
+
+ if (Array.isArray(body.orderedItems)) {
+ const activities = body.orderedItems
+ logger.info('Processing %i items ActivityPub fetcher for %s.', activities.length, options.uri)
+
+ await processActivities(activities)
+ }
+ }
+ }
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ processActivityPubHttpFetcher
+}
--- /dev/null
+import * as kue from 'kue'
+import { logger } from '../../../helpers/logger'
+import { doRequest } from '../../../helpers/requests'
+import { ActorFollowModel } from '../../../models/activitypub/actor-follow'
+import { buildSignedRequestOptions, computeBody } from './utils/activitypub-http-utils'
+
+export type ActivitypubHttpUnicastPayload = {
+ uri: string
+ signatureActorId?: number
+ body: any
+}
+
+async function processActivityPubHttpUnicast (job: kue.Job) {
+ logger.info('Processing ActivityPub unicast in job %d.', job.id)
+
+ const payload = job.data as ActivitypubHttpUnicastPayload
+ const uri = payload.uri
+
+ const body = await computeBody(payload)
+ const httpSignatureOptions = await buildSignedRequestOptions(payload)
+
+ const options = {
+ method: 'POST',
+ uri,
+ json: body,
+ httpSignature: httpSignatureOptions
+ }
+
+ try {
+ await doRequest(options)
+ ActorFollowModel.updateActorFollowsScoreAndRemoveBadOnes([ uri ], [], undefined)
+ } catch (err) {
+ ActorFollowModel.updateActorFollowsScoreAndRemoveBadOnes([], [ uri ], undefined)
+
+ throw err
+ }
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ processActivityPubHttpUnicast
+}
--- /dev/null
+import { buildSignedActivity } from '../../../../helpers/activitypub'
+import { getServerActor } from '../../../../helpers/utils'
+import { ActorModel } from '../../../../models/activitypub/actor'
+
+async function computeBody (payload: { body: any, signatureActorId?: number }) {
+ let body = payload.body
+
+ if (payload.signatureActorId) {
+ const actorSignature = await ActorModel.load(payload.signatureActorId)
+ if (!actorSignature) throw new Error('Unknown signature actor id.')
+ body = await buildSignedActivity(actorSignature, payload.body)
+ }
+
+ return body
+}
+
+async function buildSignedRequestOptions (payload: { signatureActorId?: number }) {
+ let actor: ActorModel
+ if (payload.signatureActorId) {
+ actor = await ActorModel.load(payload.signatureActorId)
+ if (!actor) throw new Error('Unknown signature actor id.')
+ } else {
+ // We need to sign the request, so use the server
+ actor = await getServerActor()
+ }
+
+ const keyId = actor.getWebfingerUrl()
+ return {
+ algorithm: 'rsa-sha256',
+ authorizationHeaderName: 'Signature',
+ keyId,
+ key: actor.privateKey
+ }
+}
+
+export {
+ computeBody,
+ buildSignedRequestOptions
+}
--- /dev/null
+import * as kue from 'kue'
+import { VideoResolution } from '../../../../shared'
+import { VideoPrivacy } from '../../../../shared/models/videos'
+import { logger } from '../../../helpers/logger'
+import { computeResolutionsToTranscode } from '../../../helpers/utils'
+import { sequelizeTypescript } from '../../../initializers'
+import { VideoModel } from '../../../models/video/video'
+import { shareVideoByServerAndChannel } from '../../activitypub'
+import { sendCreateVideo, sendUpdateVideo } from '../../activitypub/send'
+import { JobQueue } from '../job-queue'
+
+export type VideoFilePayload = {
+ videoUUID: string
+ resolution?: VideoResolution
+}
+
+async function processVideoFile (job: kue.Job) {
+ const payload = job.data as VideoFilePayload
+ logger.info('Processing video file in job %d.', job.id)
+
+ const video = await VideoModel.loadByUUIDAndPopulateAccountAndServerAndTags(payload.videoUUID)
+ // No video, maybe deleted?
+ if (!video) {
+    logger.info('Do not process job %d, video does not exist.', job.id, { videoUUID: payload.videoUUID })
+ return undefined
+ }
+
+ // Transcoding in other resolution
+ if (payload.resolution) {
+ await video.transcodeOriginalVideofile(payload.resolution)
+ await onVideoFileTranscoderSuccess(video)
+ } else {
+ await video.optimizeOriginalVideofile()
+ await onVideoFileOptimizerSuccess(video)
+ }
+
+ return video
+}
+
+async function onVideoFileTranscoderSuccess (video: VideoModel) {
+ if (video === undefined) return undefined
+
+ // Maybe the video changed in database, refresh it
+ const videoDatabase = await VideoModel.loadByUUIDAndPopulateAccountAndServerAndTags(video.uuid)
+ // Video does not exist anymore
+ if (!videoDatabase) return undefined
+
+ if (video.privacy !== VideoPrivacy.PRIVATE) {
+ await sendUpdateVideo(video, undefined)
+ }
+
+ return undefined
+}
+
+async function onVideoFileOptimizerSuccess (video: VideoModel) {
+ if (video === undefined) return undefined
+
+ // Maybe the video changed in database, refresh it
+ const videoDatabase = await VideoModel.loadByUUIDAndPopulateAccountAndServerAndTags(video.uuid)
+ // Video does not exist anymore
+ if (!videoDatabase) return undefined
+
+ if (video.privacy !== VideoPrivacy.PRIVATE) {
+ // Now we'll add the video's meta data to our followers
+ await sendCreateVideo(video, undefined)
+ await shareVideoByServerAndChannel(video, undefined)
+ }
+
+ const originalFileHeight = await videoDatabase.getOriginalFileHeight()
+
+ // Create transcoding jobs if there are enabled resolutions
+ const resolutionsEnabled = computeResolutionsToTranscode(originalFileHeight)
+ logger.info(
+ 'Resolutions computed for video %s and origin file height of %d.', videoDatabase.uuid, originalFileHeight,
+ { resolutions: resolutionsEnabled }
+ )
+
+ if (resolutionsEnabled.length !== 0) {
+ try {
+ await sequelizeTypescript.transaction(async t => {
+ const tasks: Promise<any>[] = []
+
+ for (const resolution of resolutionsEnabled) {
+ const dataInput = {
+ videoUUID: videoDatabase.uuid,
+ resolution
+ }
+
+ const p = JobQueue.Instance.createJob({ type: 'video-file', payload: dataInput })
+ tasks.push(p)
+ }
+
+ await Promise.all(tasks)
+ })
+
+ logger.info('Transcoding jobs created for uuid %s.', videoDatabase.uuid, { resolutionsEnabled })
+ } catch (err) {
+ logger.warn('Cannot transcode the video.', err)
+ }
+ } else {
+    logger.info('No transcoding jobs created for video %s (no resolutions enabled).', videoDatabase.uuid)
+ return undefined
+ }
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ processVideoFile
+}
--- /dev/null
+export * from './job-queue'
--- /dev/null
+import * as kue from 'kue'
+import { JobType, JobState } from '../../../shared/models'
+import { logger } from '../../helpers/logger'
+import { CONFIG, JOB_ATTEMPTS, JOB_COMPLETED_LIFETIME, JOB_CONCURRENCY } from '../../initializers'
+import { ActivitypubHttpBroadcastPayload, processActivityPubHttpBroadcast } from './handlers/activitypub-http-broadcast'
+import { ActivitypubHttpFetcherPayload, processActivityPubHttpFetcher } from './handlers/activitypub-http-fetcher'
+import { ActivitypubHttpUnicastPayload, processActivityPubHttpUnicast } from './handlers/activitypub-http-unicast'
+import { processVideoFile, VideoFilePayload } from './handlers/video-file'
+
+type CreateJobArgument =
+ { type: 'activitypub-http-broadcast', payload: ActivitypubHttpBroadcastPayload } |
+ { type: 'activitypub-http-unicast', payload: ActivitypubHttpUnicastPayload } |
+ { type: 'activitypub-http-fetcher', payload: ActivitypubHttpFetcherPayload } |
+ { type: 'video-file', payload: VideoFilePayload }
+
+const handlers: { [ id in JobType ]: (job: kue.Job) => Promise<any>} = {
+ 'activitypub-http-broadcast': processActivityPubHttpBroadcast,
+ 'activitypub-http-unicast': processActivityPubHttpUnicast,
+ 'activitypub-http-fetcher': processActivityPubHttpFetcher,
+ 'video-file': processVideoFile
+}
+
+class JobQueue {
+
+ private static instance: JobQueue
+
+ private jobQueue: kue.Queue
+ private initialized = false
+
+ private constructor () {}
+
+ init () {
+ // Already initialized
+ if (this.initialized === true) return
+ this.initialized = true
+
+ this.jobQueue = kue.createQueue({
+ prefix: 'q-' + CONFIG.WEBSERVER.HOST,
+ redis: {
+ host: CONFIG.REDIS.HOSTNAME,
+ port: CONFIG.REDIS.PORT,
+ auth: CONFIG.REDIS.AUTH
+ }
+ })
+
+ this.jobQueue.on('error', err => {
+ logger.error('Error in job queue.', err)
+ process.exit(-1)
+ })
+ this.jobQueue.watchStuckJobs(5000)
+
+ for (const handlerName of Object.keys(handlers)) {
+ this.jobQueue.process(handlerName, JOB_CONCURRENCY[handlerName], async (job, done) => {
+ try {
+ const res = await handlers[ handlerName ](job)
+ return done(null, res)
+ } catch (err) {
+ return done(err)
+ }
+ })
+ }
+ }
+
+ createJob (obj: CreateJobArgument, priority = 'normal') {
+ return new Promise((res, rej) => {
+ this.jobQueue
+ .create(obj.type, obj.payload)
+ .priority(priority)
+ .attempts(JOB_ATTEMPTS[obj.type])
+ .backoff({ type: 'exponential' })
+ .save(err => {
+ if (err) return rej(err)
+
+ return res()
+ })
+ })
+ }
+
+ listForApi (state: JobState, start: number, count: number, sort: string) {
+ return new Promise<kue.Job[]>((res, rej) => {
+ kue.Job.rangeByState(state, start, count, sort, (err, jobs) => {
+ if (err) return rej(err)
+
+ return res(jobs)
+ })
+ })
+ }
+
+ count (state: JobState) {
+ return new Promise<number>((res, rej) => {
+ this.jobQueue[state + 'Count']((err, total) => {
+ if (err) return rej(err)
+
+ return res(total)
+ })
+ })
+ }
+
+ removeOldJobs () {
+ const now = new Date().getTime()
+ kue.Job.rangeByState('complete', 0, -1, 'asc', (err, jobs) => {
+ if (err) {
+ logger.error('Cannot get jobs when removing old jobs.', err)
+ return
+ }
+
+ for (const job of jobs) {
+ if (now - job.created_at > JOB_COMPLETED_LIFETIME) {
+ job.remove()
+ }
+ }
+ })
+ }
+
+ static get Instance () {
+ return this.instance || (this.instance = new this())
+ }
+}
+
+// ---------------------------------------------------------------------------
+
+export {
+ JobQueue
+}
+++ /dev/null
-import { logger } from '../../../helpers/logger'
-import { doRequest } from '../../../helpers/requests'
-import { ActorFollowModel } from '../../../models/activitypub/actor-follow'
-import { ActivityPubHttpPayload, buildSignedRequestOptions, computeBody, maybeRetryRequestLater } from './activitypub-http-job-scheduler'
-
-async function process (payload: ActivityPubHttpPayload, jobId: number) {
- logger.info('Processing ActivityPub broadcast in job %d.', jobId)
-
- const body = await computeBody(payload)
- const httpSignatureOptions = await buildSignedRequestOptions(payload)
-
- const options = {
- method: 'POST',
- uri: '',
- json: body,
- httpSignature: httpSignatureOptions
- }
-
- const badUrls: string[] = []
- const goodUrls: string[] = []
-
- for (const uri of payload.uris) {
- options.uri = uri
-
- try {
- await doRequest(options)
- goodUrls.push(uri)
- } catch (err) {
- const isRetryingLater = await maybeRetryRequestLater(err, payload, uri)
- if (isRetryingLater === false) badUrls.push(uri)
- }
- }
-
- return ActorFollowModel.updateActorFollowsScoreAndRemoveBadOnes(goodUrls, badUrls, undefined)
-}
-
-function onError (err: Error, jobId: number) {
- logger.error('Error when broadcasting ActivityPub request in job %d.', jobId, err)
- return Promise.resolve()
-}
-
-function onSuccess (jobId: number) {
- logger.info('Job %d is a success.', jobId)
- return Promise.resolve()
-}
-
-// ---------------------------------------------------------------------------
-
-export {
- process,
- onError,
- onSuccess
-}
+++ /dev/null
-import { logger } from '../../../helpers/logger'
-import { doRequest } from '../../../helpers/requests'
-import { ACTIVITY_PUB } from '../../../initializers'
-import { processActivities } from '../../activitypub/process'
-import { ActivityPubHttpPayload } from './activitypub-http-job-scheduler'
-
-async function process (payload: ActivityPubHttpPayload, jobId: number) {
- logger.info('Processing ActivityPub fetcher in job %d.', jobId)
-
- const options = {
- method: 'GET',
- uri: '',
- json: true,
- activityPub: true
- }
-
- for (const uri of payload.uris) {
- options.uri = uri
- logger.info('Fetching ActivityPub data on %s.', uri)
-
- const response = await doRequest(options)
- const firstBody = response.body
-
- if (firstBody.first && Array.isArray(firstBody.first.orderedItems)) {
- const activities = firstBody.first.orderedItems
-
- logger.info('Processing %i items ActivityPub fetcher for %s.', activities.length, options.uri)
-
- await processActivities(activities)
- }
-
- let limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
- let i = 0
- let nextLink = firstBody.first.next
- while (nextLink && i < limit) {
- options.uri = nextLink
-
- const { body } = await doRequest(options)
- nextLink = body.next
- i++
-
- if (Array.isArray(body.orderedItems)) {
- const activities = body.orderedItems
- logger.info('Processing %i items ActivityPub fetcher for %s.', activities.length, options.uri)
-
- await processActivities(activities)
- }
- }
- }
-}
-
-function onError (err: Error, jobId: number) {
- logger.error('Error when fetcher ActivityPub request in job %d.', jobId, err)
- return Promise.resolve()
-}
-
-function onSuccess (jobId: number) {
- logger.info('Job %d is a success.', jobId)
- return Promise.resolve()
-}
-
-// ---------------------------------------------------------------------------
-
-export {
- process,
- onError,
- onSuccess
-}
+++ /dev/null
-import { JobCategory } from '../../../../shared'
-import { buildSignedActivity } from '../../../helpers/activitypub'
-import { logger } from '../../../helpers/logger'
-import { getServerActor } from '../../../helpers/utils'
-import { ACTIVITY_PUB } from '../../../initializers'
-import { ActorModel } from '../../../models/activitypub/actor'
-import { ActorFollowModel } from '../../../models/activitypub/actor-follow'
-import { JobHandler, JobScheduler } from '../job-scheduler'
-
-import * as activitypubHttpBroadcastHandler from './activitypub-http-broadcast-handler'
-import * as activitypubHttpFetcherHandler from './activitypub-http-fetcher-handler'
-import * as activitypubHttpUnicastHandler from './activitypub-http-unicast-handler'
-
-type ActivityPubHttpPayload = {
- uris: string[]
- signatureActorId?: number
- body?: any
- attemptNumber?: number
-}
-
-const jobHandlers: { [ handlerName: string ]: JobHandler<ActivityPubHttpPayload, void> } = {
- activitypubHttpBroadcastHandler,
- activitypubHttpUnicastHandler,
- activitypubHttpFetcherHandler
-}
-const jobCategory: JobCategory = 'activitypub-http'
-
-const activitypubHttpJobScheduler = new JobScheduler(jobCategory, jobHandlers)
-
-async function maybeRetryRequestLater (err: Error, payload: ActivityPubHttpPayload, uri: string) {
- logger.warn('Cannot make request to %s.', uri, err)
-
- let attemptNumber = payload.attemptNumber || 1
- attemptNumber += 1
-
- if (attemptNumber < ACTIVITY_PUB.MAX_HTTP_ATTEMPT) {
- logger.debug('Retrying request to %s (attempt %d/%d).', uri, attemptNumber, ACTIVITY_PUB.MAX_HTTP_ATTEMPT, err)
-
- const actor = await ActorFollowModel.loadByFollowerInbox(uri, undefined)
- if (!actor) {
- logger.debug('Actor %s is not a follower, do not retry the request.', uri)
- return false
- }
-
- const newPayload = Object.assign(payload, {
- uris: [ uri ],
- attemptNumber
- })
- await activitypubHttpJobScheduler.createJob(undefined, 'activitypubHttpUnicastHandler', newPayload)
-
- return true
- }
-
- return false
-}
-
-async function computeBody (payload: ActivityPubHttpPayload) {
- let body = payload.body
-
- if (payload.signatureActorId) {
- const actorSignature = await ActorModel.load(payload.signatureActorId)
- if (!actorSignature) throw new Error('Unknown signature actor id.')
- body = await buildSignedActivity(actorSignature, payload.body)
- }
-
- return body
-}
-
-async function buildSignedRequestOptions (payload: ActivityPubHttpPayload) {
- let actor: ActorModel
- if (payload.signatureActorId) {
- actor = await ActorModel.load(payload.signatureActorId)
- if (!actor) throw new Error('Unknown signature actor id.')
- } else {
- // We need to sign the request, so use the server
- actor = await getServerActor()
- }
-
- const keyId = actor.getWebfingerUrl()
- return {
- algorithm: 'rsa-sha256',
- authorizationHeaderName: 'Signature',
- keyId,
- key: actor.privateKey
- }
-}
-
-export {
- ActivityPubHttpPayload,
- activitypubHttpJobScheduler,
- maybeRetryRequestLater,
- computeBody,
- buildSignedRequestOptions
-}
+++ /dev/null
-import { logger } from '../../../helpers/logger'
-import { doRequest } from '../../../helpers/requests'
-import { ActorFollowModel } from '../../../models/activitypub/actor-follow'
-import { ActivityPubHttpPayload, buildSignedRequestOptions, computeBody, maybeRetryRequestLater } from './activitypub-http-job-scheduler'
-
-async function process (payload: ActivityPubHttpPayload, jobId: number) {
- logger.info('Processing ActivityPub unicast in job %d.', jobId)
-
- const uri = payload.uris[0]
-
- const body = await computeBody(payload)
- const httpSignatureOptions = await buildSignedRequestOptions(payload)
-
- const options = {
- method: 'POST',
- uri,
- json: body,
- httpSignature: httpSignatureOptions
- }
-
- try {
- await doRequest(options)
- ActorFollowModel.updateActorFollowsScoreAndRemoveBadOnes([ uri ], [], undefined)
- } catch (err) {
- const isRetryingLater = await maybeRetryRequestLater(err, payload, uri)
- if (isRetryingLater === false) {
- ActorFollowModel.updateActorFollowsScoreAndRemoveBadOnes([], [ uri ], undefined)
- }
-
- throw err
- }
-}
-
-function onError (err: Error, jobId: number) {
- logger.error('Error when sending ActivityPub request in job %d.', jobId, err)
- return Promise.resolve()
-}
-
-function onSuccess (jobId: number) {
- logger.info('Job %d is a success.', jobId)
- return Promise.resolve()
-}
-
-// ---------------------------------------------------------------------------
-
-export {
- process,
- onError,
- onSuccess
-}
+++ /dev/null
-export * from './activitypub-http-job-scheduler'
+++ /dev/null
-export * from './activitypub-http-job-scheduler'
-export * from './transcoding-job-scheduler'
+++ /dev/null
-import { AsyncQueue, forever, queue } from 'async'
-import * as Sequelize from 'sequelize'
-import { JobCategory } from '../../../shared'
-import { logger } from '../../helpers/logger'
-import { JOB_STATES, JOBS_FETCH_LIMIT_PER_CYCLE, JOBS_FETCHING_INTERVAL } from '../../initializers'
-import { JobModel } from '../../models/job/job'
-
-export interface JobHandler<P, T> {
- process (data: object, jobId: number): Promise<T>
- onError (err: Error, jobId: number)
- onSuccess (jobId: number, jobResult: T, jobScheduler: JobScheduler<P, T>): Promise<any>
-}
-type JobQueueCallback = (err: Error) => void
-
-class JobScheduler<P, T> {
-
- constructor (
- private jobCategory: JobCategory,
- private jobHandlers: { [ id: string ]: JobHandler<P, T> }
- ) {}
-
- async activate () {
- const limit = JOBS_FETCH_LIMIT_PER_CYCLE[this.jobCategory]
-
- logger.info('Jobs scheduler %s activated.', this.jobCategory)
-
- const jobsQueue = queue<JobModel, JobQueueCallback>(this.processJob.bind(this))
-
- // Finish processing jobs from a previous start
- const state = JOB_STATES.PROCESSING
- try {
- const jobs = await JobModel.listWithLimitByCategory(limit, state, this.jobCategory)
-
- this.enqueueJobs(jobsQueue, jobs)
- } catch (err) {
- logger.error('Cannot list pending jobs.', err)
- }
-
- forever(
- async next => {
- if (jobsQueue.length() !== 0) {
- // Finish processing the queue first
- return setTimeout(next, JOBS_FETCHING_INTERVAL)
- }
-
- const state = JOB_STATES.PENDING
- try {
- const jobs = await JobModel.listWithLimitByCategory(limit, state, this.jobCategory)
-
- this.enqueueJobs(jobsQueue, jobs)
- } catch (err) {
- logger.error('Cannot list pending jobs.', err)
- }
-
- // Optimization: we could use "drain" from queue object
- return setTimeout(next, JOBS_FETCHING_INTERVAL)
- },
-
- err => logger.error('Error in job scheduler queue.', err)
- )
- }
-
- createJob (transaction: Sequelize.Transaction, handlerName: string, handlerInputData: P) {
- const createQuery = {
- state: JOB_STATES.PENDING,
- category: this.jobCategory,
- handlerName,
- handlerInputData
- }
-
- const options = { transaction }
-
- return JobModel.create(createQuery, options)
- }
-
- private enqueueJobs (jobsQueue: AsyncQueue<JobModel>, jobs: JobModel[]) {
- jobs.forEach(job => jobsQueue.push(job))
- }
-
- private async processJob (job: JobModel, callback: (err: Error) => void) {
- const jobHandler = this.jobHandlers[job.handlerName]
- if (jobHandler === undefined) {
- const errorString = 'Unknown job handler ' + job.handlerName + ' for job ' + job.id
- logger.error(errorString)
-
- const error = new Error(errorString)
- await this.onJobError(jobHandler, job, error)
- return callback(error)
- }
-
- logger.info('Processing job %d with handler %s.', job.id, job.handlerName)
-
- job.state = JOB_STATES.PROCESSING
- await job.save()
-
- try {
- const result: T = await jobHandler.process(job.handlerInputData, job.id)
- await this.onJobSuccess(jobHandler, job, result)
- } catch (err) {
- logger.error('Error in job handler %s.', job.handlerName, err)
-
- try {
- await this.onJobError(jobHandler, job, err)
- } catch (innerErr) {
- this.cannotSaveJobError(innerErr)
- return callback(innerErr)
- }
- }
-
- return callback(null)
- }
-
- private async onJobError (jobHandler: JobHandler<P, T>, job: JobModel, err: Error) {
- job.state = JOB_STATES.ERROR
-
- try {
- await job.save()
- if (jobHandler) await jobHandler.onError(err, job.id)
- } catch (err) {
- this.cannotSaveJobError(err)
- }
- }
-
- private async onJobSuccess (jobHandler: JobHandler<P, T>, job: JobModel, jobResult: T) {
- job.state = JOB_STATES.SUCCESS
-
- try {
- await job.save()
- await jobHandler.onSuccess(job.id, jobResult, this)
- } catch (err) {
- this.cannotSaveJobError(err)
- }
- }
-
- private cannotSaveJobError (err: Error) {
- logger.error('Cannot save new job state.', err)
- }
-}
-
-// ---------------------------------------------------------------------------
-
-export {
- JobScheduler
-}
+++ /dev/null
-export * from './transcoding-job-scheduler'
+++ /dev/null
-import { JobCategory } from '../../../../shared'
-import { VideoModel } from '../../../models/video/video'
-import { JobHandler, JobScheduler } from '../job-scheduler'
-
-import * as videoFileOptimizer from './video-file-optimizer-handler'
-import * as videoFileTranscoder from './video-file-transcoder-handler'
-
-type TranscodingJobPayload = {
- videoUUID: string
- resolution?: number
-}
-const jobHandlers: { [ handlerName: string ]: JobHandler<TranscodingJobPayload, VideoModel> } = {
- videoFileOptimizer,
- videoFileTranscoder
-}
-const jobCategory: JobCategory = 'transcoding'
-
-const transcodingJobScheduler = new JobScheduler(jobCategory, jobHandlers)
-
-export {
- TranscodingJobPayload,
- transcodingJobScheduler
-}
+++ /dev/null
-import * as Bluebird from 'bluebird'
-import { VideoPrivacy } from '../../../../shared/models/videos'
-import { logger } from '../../../helpers/logger'
-import { computeResolutionsToTranscode } from '../../../helpers/utils'
-import { sequelizeTypescript } from '../../../initializers'
-import { JobModel } from '../../../models/job/job'
-import { VideoModel } from '../../../models/video/video'
-import { shareVideoByServerAndChannel } from '../../activitypub'
-import { sendCreateVideo } from '../../activitypub/send'
-import { JobScheduler } from '../job-scheduler'
-import { TranscodingJobPayload } from './transcoding-job-scheduler'
-
-async function process (data: TranscodingJobPayload, jobId: number) {
- const video = await VideoModel.loadByUUIDAndPopulateAccountAndServerAndTags(data.videoUUID)
- // No video, maybe deleted?
- if (!video) {
- logger.info('Do not process job %d, video does not exist.', jobId, { videoUUID: video.uuid })
- return undefined
- }
-
- await video.optimizeOriginalVideofile()
-
- return video
-}
-
-function onError (err: Error, jobId: number) {
- logger.error('Error when optimized video file in job %d.', jobId, err)
- return Promise.resolve()
-}
-
-async function onSuccess (jobId: number, video: VideoModel, jobScheduler: JobScheduler<TranscodingJobPayload, VideoModel>) {
- if (video === undefined) return undefined
-
- logger.info('Job %d is a success.', jobId)
-
- // Maybe the video changed in database, refresh it
- const videoDatabase = await VideoModel.loadByUUIDAndPopulateAccountAndServerAndTags(video.uuid)
- // Video does not exist anymore
- if (!videoDatabase) return undefined
-
- if (video.privacy !== VideoPrivacy.PRIVATE) {
- // Now we'll add the video's meta data to our followers
- await sendCreateVideo(video, undefined)
- await shareVideoByServerAndChannel(video, undefined)
- }
-
- const originalFileHeight = await videoDatabase.getOriginalFileHeight()
-
- // Create transcoding jobs if there are enabled resolutions
- const resolutionsEnabled = computeResolutionsToTranscode(originalFileHeight)
- logger.info(
- 'Resolutions computed for video %s and origin file height of %d.', videoDatabase.uuid, originalFileHeight,
- { resolutions: resolutionsEnabled }
- )
-
- if (resolutionsEnabled.length !== 0) {
- try {
- await sequelizeTypescript.transaction(async t => {
- const tasks: Bluebird<JobModel>[] = []
-
- for (const resolution of resolutionsEnabled) {
- const dataInput = {
- videoUUID: videoDatabase.uuid,
- resolution
- }
-
- const p = jobScheduler.createJob(t, 'videoFileTranscoder', dataInput)
- tasks.push(p)
- }
-
- await Promise.all(tasks)
- })
-
- logger.info('Transcoding jobs created for uuid %s.', videoDatabase.uuid, { resolutionsEnabled })
- } catch (err) {
- logger.warn('Cannot transcode the video.', err)
- }
- } else {
- logger.info('No transcoding jobs created for video %s (no resolutions enabled).')
- return undefined
- }
-}
-
-// ---------------------------------------------------------------------------
-
-export {
- process,
- onError,
- onSuccess
-}
+++ /dev/null
-import { VideoResolution } from '../../../../shared'
-import { VideoPrivacy } from '../../../../shared/models/videos'
-import { logger } from '../../../helpers/logger'
-import { VideoModel } from '../../../models/video/video'
-import { sendUpdateVideo } from '../../activitypub/send'
-
-async function process (data: { videoUUID: string, resolution: VideoResolution }, jobId: number) {
- const video = await VideoModel.loadByUUIDAndPopulateAccountAndServerAndTags(data.videoUUID)
- // No video, maybe deleted?
- if (!video) {
- logger.info('Do not process job %d, video does not exist.', jobId, { videoUUID: video.uuid })
- return undefined
- }
-
- await video.transcodeOriginalVideofile(data.resolution)
-
- return video
-}
-
-function onError (err: Error, jobId: number) {
- logger.error('Error when transcoding video file in job %d.', jobId, err)
- return Promise.resolve()
-}
-
-async function onSuccess (jobId: number, video: VideoModel) {
- if (video === undefined) return undefined
-
- logger.info('Job %d is a success.', jobId)
-
- // Maybe the video changed in database, refresh it
- const videoDatabase = await VideoModel.loadByUUIDAndPopulateAccountAndServerAndTags(video.uuid)
- // Video does not exist anymore
- if (!videoDatabase) return undefined
-
- if (video.privacy !== VideoPrivacy.PRIVATE) {
- await sendUpdateVideo(video, undefined)
- }
-
- return undefined
-}
-
-// ---------------------------------------------------------------------------
-
-export {
- process,
- onError,
- onSuccess
-}
--- /dev/null
+import { JobQueue } from '../job-queue'
+import { AbstractScheduler } from './abstract-scheduler'
+
+export class RemoveOldJobsScheduler extends AbstractScheduler {
+
+ private static instance: AbstractScheduler
+
+ private constructor () {
+ super()
+ }
+
+ async execute () {
+ JobQueue.Instance.removeOldJobs()
+ }
+
+ static get Instance () {
+ return this.instance || (this.instance = new this())
+ }
+}
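The scheduler above only overrides `execute()`; everything else comes from `AbstractScheduler`, which is not part of this patch. As a rough sketch of what that base class is assumed to provide (the interval length and method names here are assumptions, not taken from the diff):

```
// Hedged sketch only: the real AbstractScheduler may differ.
export abstract class AbstractScheduler {

  private interval: NodeJS.Timer

  // Start calling execute() periodically (interval chosen arbitrarily here).
  enable () {
    this.interval = setInterval(() => this.execute(), 1000 * 60 * 60)
  }

  // Stop the periodic execution.
  disable () {
    clearInterval(this.interval)
  }

  protected abstract execute (): Promise<any> | void
}
```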
--- /dev/null
+import * as express from 'express'
+import { param } from 'express-validator/check'
+import { isValidJobState } from '../../helpers/custom-validators/jobs'
+import { logger } from '../../helpers/logger'
+import { areValidationErrors } from './utils'
+
+const listJobsValidator = [
+ param('state').custom(isValidJobState).not().isEmpty().withMessage('Should have a valid job state'),
+
+ async (req: express.Request, res: express.Response, next: express.NextFunction) => {
+ logger.debug('Checking listJobsValidator parameters.', { parameters: req.params })
+
+ if (areValidationErrors(req, res)) return
+
+ return next()
+ }
+]
+
+// ---------------------------------------------------------------------------
+
+export {
+ listJobsValidator
+}
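`isValidJobState` comes from `helpers/custom-validators/jobs`, whose body is not shown in this diff. Assuming it is a simple whitelist check against the `JobState` union introduced below, it could look roughly like this (sketch, not the actual helper):

```
// Sketch of a possible isValidJobState (assumed implementation).
import { JobState } from '../../../shared/models'

const jobStates: JobState[] = [ 'active', 'complete', 'failed', 'inactive', 'delayed' ]

function isValidJobState (value: string) {
  return jobStates.indexOf(value as JobState) !== -1
}

export { isValidJobState }
```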
+++ /dev/null
-import { values } from 'lodash'
-import { AllowNull, Column, CreatedAt, DataType, Model, Table, UpdatedAt } from 'sequelize-typescript'
-import { JobCategory, JobState } from '../../../shared/models'
-import { JOB_CATEGORIES, JOB_STATES } from '../../initializers'
-import { getSort } from '../utils'
-
-@Table({
- tableName: 'job',
- indexes: [
- {
- fields: [ 'state', 'category' ]
- }
- ]
-})
-export class JobModel extends Model<JobModel> {
- @AllowNull(false)
- @Column(DataType.ENUM(values(JOB_STATES)))
- state: JobState
-
- @AllowNull(false)
- @Column(DataType.ENUM(values(JOB_CATEGORIES)))
- category: JobCategory
-
- @AllowNull(false)
- @Column
- handlerName: string
-
- @AllowNull(true)
- @Column(DataType.JSON)
- handlerInputData: any
-
- @CreatedAt
- createdAt: Date
-
- @UpdatedAt
- updatedAt: Date
-
- static listWithLimitByCategory (limit: number, state: JobState, jobCategory: JobCategory) {
- const query = {
- order: [
- [ 'id', 'ASC' ]
- ],
- limit: limit,
- where: {
- state,
- category: jobCategory
- },
- logging: false
- }
-
- return JobModel.findAll(query)
- }
-
- static listForApi (start: number, count: number, sort: string) {
- const query = {
- offset: start,
- limit: count,
- order: [ getSort(sort) ]
- }
-
- return JobModel.findAndCountAll(query).then(({ rows, count }) => {
- return {
- data: rows,
- total: count
- }
- })
- }
-
- toFormattedJSON () {
- return {
- id: this.id,
- state: this.state,
- category: this.category,
- handlerName: this.handlerName,
- handlerInputData: this.handlerInputData,
- createdAt: this.createdAt,
- updatedAt: this.updatedAt
- }
- }
-}
import { makeGetRequest } from '../../utils/requests/requests'
describe('Test jobs API validators', function () {
- const path = '/api/v1/jobs/'
+ const path = '/api/v1/jobs/failed'
let server: ServerInfo
let userAccessToken = ''
})
describe('When listing jobs', function () {
+
+ it('Should fail with a bad state', async function () {
+ await makeGetRequest({
+ url: server.url,
+ token: server.accessToken,
+ path: path + 'ade'
+ })
+ })
+
it('Should fail with a bad start pagination', async function () {
await checkBadStartPagination(server.url, path, server.accessToken)
})
import * as chai from 'chai'
import 'mocha'
+import { JobState } from '../../../../shared/models'
import { VideoPrivacy } from '../../../../shared/models/videos'
import { VideoCommentThreadTree } from '../../../../shared/models/videos/video-comment.model'
import { completeVideoCheck, getVideo, immutableAssign, reRunServer, viewVideo } from '../../utils'
})
it('Should not have pending/processing jobs anymore', async function () {
- const res = await getJobsListPaginationAndSort(servers[0].url, servers[0].accessToken, 0, 50, '-createdAt')
- const jobs = res.body.data
+ const states: JobState[] = [ 'inactive', 'active' ]
- for (const job of jobs) {
- expect(job.state).not.to.equal('pending')
- expect(job.state).not.to.equal('processing')
+ for (const state of states) {
+ const res = await getJobsListPaginationAndSort(servers[0].url, servers[0].accessToken, state, 0, 50, '-createdAt')
+ expect(res.body.data).to.have.length(0)
}
})
})
it('Should list jobs', async function () {
- const res = await getJobsList(servers[1].url, servers[1].accessToken)
+ const res = await getJobsList(servers[1].url, servers[1].accessToken, 'complete')
expect(res.body.total).to.be.above(2)
expect(res.body.data).to.have.length.above(2)
})
it('Should list jobs with sort and pagination', async function () {
- const res = await getJobsListPaginationAndSort(servers[1].url, servers[1].accessToken, 4, 1, 'createdAt')
+ const res = await getJobsListPaginationAndSort(servers[1].url, servers[1].accessToken, 'complete', 1, 1, 'createdAt')
expect(res.body.total).to.be.above(2)
expect(res.body.data).to.have.lengthOf(1)
const job = res.body.data[0]
- expect(job.state).to.equal('success')
- expect(job.category).to.equal('transcoding')
- expect(job.handlerName).to.have.length.above(3)
+
+ expect(job.state).to.equal('complete')
+ expect(job.type).to.equal('activitypub-http-unicast')
expect(dateIsValid(job.createdAt)).to.be.true
expect(dateIsValid(job.updatedAt)).to.be.true
})
it('Should like and dislikes videos on different services', async function () {
this.timeout(20000)
- const tasks: Promise<any>[] = []
- tasks.push(rateVideo(servers[0].url, servers[0].accessToken, remoteVideosServer1[0], 'like'))
- tasks.push(rateVideo(servers[0].url, servers[0].accessToken, remoteVideosServer1[0], 'dislike'))
- tasks.push(rateVideo(servers[0].url, servers[0].accessToken, remoteVideosServer1[0], 'like'))
- tasks.push(rateVideo(servers[2].url, servers[2].accessToken, localVideosServer3[1], 'like'))
- tasks.push(rateVideo(servers[2].url, servers[2].accessToken, localVideosServer3[1], 'dislike'))
- tasks.push(rateVideo(servers[2].url, servers[2].accessToken, remoteVideosServer3[1], 'dislike'))
- tasks.push(rateVideo(servers[2].url, servers[2].accessToken, remoteVideosServer3[0], 'like'))
-
- await Promise.all(tasks)
+ await rateVideo(servers[0].url, servers[0].accessToken, remoteVideosServer1[0], 'like')
+ await wait(200)
+ await rateVideo(servers[0].url, servers[0].accessToken, remoteVideosServer1[0], 'dislike')
+ await wait(200)
+ await rateVideo(servers[0].url, servers[0].accessToken, remoteVideosServer1[0], 'like')
+ await rateVideo(servers[2].url, servers[2].accessToken, localVideosServer3[1], 'like')
+ await wait(200)
+ await rateVideo(servers[2].url, servers[2].accessToken, localVideosServer3[1], 'dislike')
+ await rateVideo(servers[2].url, servers[2].accessToken, remoteVideosServer3[1], 'dislike')
+ await wait(200)
+ await rateVideo(servers[2].url, servers[2].accessToken, remoteVideosServer3[0], 'like')
await wait(10000)
import * as program from 'commander'
import { Video, VideoFile, VideoRateType } from '../../../shared'
+import { JobState } from '../../../shared/models'
import {
flushAndRunMultipleServers,
flushTests, follow,
}
async function isTherePendingRequests (servers: ServerInfo[]) {
+ const states: JobState[] = [ 'inactive', 'active' ]
const tasks: Promise<any>[] = []
let pendingRequests = false
// Check if each server has pending request
for (const server of servers) {
- const p = getJobsListPaginationAndSort(server.url, server.accessToken, 0, 10, '-createdAt')
- .then(res => {
- const jobs = res.body.data
-
- for (const job of jobs) {
- if (job.state === 'pending' || job.state === 'processing') {
- pendingRequests = true
- }
- }
- })
-
- tasks.push(p)
+ for (const state of states) {
+ const p = getJobsListPaginationAndSort(server.url, server.accessToken, state, 0, 10, '-createdAt')
+ .then(res => {
+ if (res.body.total > 0) pendingRequests = true
+ })
+ tasks.push(p)
+ }
}
await Promise.all(tasks)
import * as request from 'supertest'
+import { JobState } from '../../../../shared/models'
-function getJobsList (url: string, accessToken: string) {
- const path = '/api/v1/jobs'
+function getJobsList (url: string, accessToken: string, state: JobState) {
+ const path = '/api/v1/jobs/' + state
return request(url)
.get(path)
.expect('Content-Type', /json/)
}
-function getJobsListPaginationAndSort (url: string, accessToken: string, start: number, count: number, sort: string) {
- const path = '/api/v1/jobs'
+function getJobsListPaginationAndSort (url: string, accessToken: string, state: JobState, start: number, count: number, sort: string) {
+ const path = '/api/v1/jobs/' + state
return request(url)
.get(path)
-export type JobState = 'pending' | 'processing' | 'error' | 'success'
-export type JobCategory = 'transcoding' | 'activitypub-http'
+export type JobState = 'active' | 'complete' | 'failed' | 'inactive' | 'delayed'
+
+export type JobType = 'activitypub-http-unicast' |
+ 'activitypub-http-broadcast' |
+ 'activitypub-http-fetcher' |
+ 'video-file'
export interface Job {
id: number
state: JobState
- category: JobCategory
- handlerName: string
- handlerInputData: any
+ type: JobType
+ data: any
+ error: any
createdAt: Date
updatedAt: Date
}
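With the model above, jobs are now listed per state through `/api/v1/jobs/:state` instead of a single flat endpoint. A rough usage example, assuming the `start`/`count`/`sort` query parameters used by the tests in this patch and a hypothetical `listFailedJobs` helper:

```
// Sketch: fetching failed jobs from the new state-scoped endpoint.
import fetch from 'node-fetch'

async function listFailedJobs (baseUrl: string, accessToken: string) {
  const res = await fetch(baseUrl + '/api/v1/jobs/failed?start=0&count=10&sort=-createdAt', {
    headers: { Authorization: 'Bearer ' + accessToken }
  })

  // Expected shape: { total: number, data: Job[] }
  return res.json()
}
```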
```
$ sudo apt update
-$ sudo apt install nginx ffmpeg postgresql openssl g++ make
+$ sudo apt install nginx ffmpeg postgresql openssl g++ make redis-server
```
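Once the packages are installed, you can quickly check that the local Redis instance answers before starting PeerTube (assuming the default configuration on localhost):

```
$ redis-cli ping
PONG
```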
## Arch Linux
1. Run:
```
-$ sudo pacman -S nodejs yarn ffmpeg postgresql openssl
+$ sudo pacman -S nodejs yarn ffmpeg postgresql openssl redis
```
## Other distributions
* [TypeScript](https://www.typescriptlang.org/) -> Language
* [PostgreSQL](https://www.postgresql.org/) -> Database
+ * [Redis](https://redis.io/) -> Job queue/cache
* [Express](http://expressjs.com) -> Web server framework
* [Sequelize](http://docs.sequelizejs.com/en/v3/) -> SQL ORM
* [WebTorrent](https://webtorrent.io/) -> BitTorrent tracker and torrent creation
},
"exclude": [
"node_modules",
- "client"
+ "client",
+ "text1",
+ "text2",
+ "text3",
+ "text4",
+ "text5",
+ "text6"
]
}
version "1.0.6"
resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-1.0.6.tgz#3e02972728c69248c2af08d60a48cbb8680fffdf"
+"@types/kue@^0.11.8":
+ version "0.11.8"
+ resolved "https://registry.yarnpkg.com/@types/kue/-/kue-0.11.8.tgz#820f5e3db6025f0411e0942cd3ccab461a060c90"
+ dependencies:
+ "@types/express" "*"
+ "@types/node" "*"
+ "@types/redis" "*"
+
"@types/lodash@*", "@types/lodash@^4.14.64":
version "4.14.95"
resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.95.tgz#02c170690719bbaca8293d9c8cdcccb565728081"
version "1.9.3"
resolved "https://registry.yarnpkg.com/@types/pem/-/pem-1.9.3.tgz#0c864c8b79e43fef6367db895f60fd1edd10e86c"
+"@types/redis@*":
+ version "2.8.5"
+ resolved "https://registry.yarnpkg.com/@types/redis/-/redis-2.8.5.tgz#c4a31a63e95434202eb84908290528ad8510b149"
+ dependencies:
+ "@types/events" "*"
+ "@types/node" "*"
+
"@types/reflect-metadata@0.0.4":
version "0.0.4"
resolved "https://registry.yarnpkg.com/@types/reflect-metadata/-/reflect-metadata-0.0.4.tgz#b6477ca9a97e5265f2ac67f9ea704eae5e0eaf4d"
mime-types "~2.1.16"
negotiator "0.6.1"
+acorn-globals@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-3.1.0.tgz#fd8270f71fbb4996b004fa880ee5d46573a731bf"
+ dependencies:
+ acorn "^4.0.4"
+
acorn-jsx@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b"
dependencies:
acorn "^3.0.4"
-acorn@^3.0.4:
+acorn@^3.0.4, acorn@^3.1.0, acorn@~3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a"
+acorn@^4.0.4, acorn@~4.0.2:
+ version "4.0.13"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787"
+
acorn@^5.2.1:
version "5.3.0"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.3.0.tgz#7446d39459c54fb49a80e6ee6478149b940ec822"
micromatch "^3.1.4"
normalize-path "^2.1.1"
+"apparatus@>= 0.0.9":
+ version "0.0.9"
+ resolved "https://registry.yarnpkg.com/apparatus/-/apparatus-0.0.9.tgz#37dcd25834ad0b651076596291db823eeb1908bd"
+ dependencies:
+ sylvester ">= 0.0.8"
+
append-field@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/append-field/-/append-field-0.1.0.tgz#6ddc58fa083c7bc545d3c5995b2830cc2366d44a"
version "4.11.8"
resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.8.tgz#2cde09eb5ee341f484746bb0309b3253b1b1442f"
-body-parser@1.18.2, body-parser@^1.12.4:
+body-parser@1.18.2, body-parser@^1.12.2, body-parser@^1.12.4:
version "1.18.2"
resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.18.2.tgz#87678a19d84b47d859b83199bd59bce222b10454"
dependencies:
escape-string-regexp "^1.0.5"
supports-color "^4.0.0"
+character-parser@^2.1.1:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0"
+ dependencies:
+ is-regex "^1.0.3"
+
charenc@~0.0.1:
version "0.0.2"
resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
isobject "^3.0.0"
static-extend "^0.1.1"
-clean-css@~3.4.2:
+clean-css@^3.3.0, clean-css@~3.4.2:
version "3.4.28"
resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-3.4.28.tgz#bf1945e82fc808f55695e6ddeaec01400efd03ff"
dependencies:
version "1.1.0"
resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
+constantinople@^3.0.1:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-3.1.0.tgz#7569caa8aa3f8d5935d62e1fa96f9f702cd81c79"
+ dependencies:
+ acorn "^3.1.0"
+ is-expression "^2.0.1"
+
contains-path@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a"
version "1.0.0"
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e"
+css-parse@1.7.x:
+ version "1.7.0"
+ resolved "https://registry.yarnpkg.com/css-parse/-/css-parse-1.7.0.tgz#321f6cf73782a6ff751111390fc05e2c657d8c9b"
+
css-select@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/css-select/-/css-select-1.0.0.tgz#b1121ca51848dd264e2244d058cee254deeb44b0"
version "1.0.1"
resolved "https://registry.yarnpkg.com/debug-log/-/debug-log-1.0.1.tgz#2307632d4c04382b8df8a32f70b895046d52745f"
-debug@2.6.9, debug@^2.0.0, debug@^2.1.0, debug@^2.1.1, debug@^2.2.0, debug@^2.3.3, debug@^2.5.2, debug@^2.6.8, debug@^2.6.9:
- version "2.6.9"
- resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
+debug@*, debug@3.1.0, debug@^3.0.0, debug@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
dependencies:
ms "2.0.0"
-debug@3.1.0, debug@^3.0.0, debug@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
+debug@2.6.9, debug@^2.0.0, debug@^2.1.0, debug@^2.1.1, debug@^2.2.0, debug@^2.3.3, debug@^2.5.2, debug@^2.6.8, debug@^2.6.9:
+ version "2.6.9"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
dependencies:
ms "2.0.0"
dependencies:
esutils "^2.0.2"
+doctypes@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9"
+
dom-serializer@0, dom-serializer@~0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.1.0.tgz#073c697546ce0780ce23be4a28e293e40bc30c82"
version "2.0.0"
resolved "https://registry.yarnpkg.com/dottie/-/dottie-2.0.0.tgz#da191981c8b8d713ca0115d5898cf397c2f0ddd0"
+double-ended-queue@^2.1.0-0:
+ version "2.1.0-0"
+ resolved "https://registry.yarnpkg.com/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz#103d3527fd31528f40188130c841efdd78264e5c"
+
duplexer3@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2"
lodash "^4.16.0"
validator "~8.2.0"
-express@^4.12.4, express@^4.13.3:
+express@^4.12.2, express@^4.12.4, express@^4.13.3:
version "4.16.2"
resolved "https://registry.yarnpkg.com/express/-/express-4.16.2.tgz#e35c6dfe2d64b7dca0a5cd4f21781be3299e076c"
dependencies:
assign-symbols "^1.0.0"
is-extendable "^1.0.1"
+extend@^1.2.1:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/extend/-/extend-1.3.0.tgz#d1516fb0ff5624d2ebf9123ea1dac5a1994004f8"
+
extend@^3.0.0, extend@~3.0.0, extend@~3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444"
is-glob "^3.1.0"
path-dirname "^1.0.0"
+glob@7.0.x:
+ version "7.0.6"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.0.6.tgz#211bafaf49e525b8cd93260d14ab136152b3f57a"
+ dependencies:
+ fs.realpath "^1.0.0"
+ inflight "^1.0.4"
+ inherits "2"
+ minimatch "^3.0.2"
+ once "^1.3.0"
+ path-is-absolute "^1.0.0"
+
glob@7.1.2, glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1, glob@~7.1.1:
version "7.1.2"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
is-data-descriptor "^1.0.0"
kind-of "^6.0.2"
+is-expression@^2.0.1:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-2.1.0.tgz#91be9d47debcfef077977e9722be6dcfb4465ef0"
+ dependencies:
+ acorn "~3.3.0"
+ object-assign "^4.0.1"
+
+is-expression@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-3.0.0.tgz#39acaa6be7fd1f3471dc42c7416e61c24317ac9f"
+ dependencies:
+ acorn "~4.0.2"
+ object-assign "^4.0.1"
+
is-extendable@^0.1.0, is-extendable@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
dependencies:
isobject "^3.0.1"
+is-promise@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa"
+
is-property@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/is-property/-/is-property-1.0.2.tgz#57fe1c4e48474edd65b09911f26b1cd4095dda84"
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24"
-is-regex@^1.0.4:
+is-regex@^1.0.3, is-regex@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491"
dependencies:
version "1.0.1"
resolved "https://registry.yarnpkg.com/js-string-escape/-/js-string-escape-1.0.1.tgz#e2625badbc0d67c7533e9edc1068c587ae4137ef"
+js-stringify@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db"
+
js-tokens@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b"
json-schema "0.2.3"
verror "1.10.0"
+jstransformer@1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3"
+ dependencies:
+ is-promise "^2.0.0"
+ promise "^7.0.1"
+
jsx-ast-utils@^1.3.4:
version "1.4.1"
resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-1.4.1.tgz#3867213e8dd79bf1e8f2300c0cfc1efb182c0df1"
version "6.0.2"
resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051"
+kue@^0.11.6:
+ version "0.11.6"
+ resolved "https://registry.yarnpkg.com/kue/-/kue-0.11.6.tgz#5b76916bcedd56636a107861471c63c94611860a"
+ dependencies:
+ body-parser "^1.12.2"
+ express "^4.12.2"
+ lodash "^4.0.0"
+ nib "~1.1.2"
+ node-redis-warlock "~0.2.0"
+ pug "^2.0.0-beta3"
+ redis "~2.6.0-2"
+ stylus "~0.54.5"
+ yargs "^4.0.0"
+ optionalDependencies:
+ reds "^0.2.5"
+
kuler@0.0.x:
version "0.0.0"
resolved "https://registry.yarnpkg.com/kuler/-/kuler-0.0.0.tgz#b66bb46b934e550f59d818848e0abba4f7f5553c"
version "3.0.9"
resolved "https://registry.yarnpkg.com/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz#5203ad7ba425fae842460e696db9cf3e6aac057c"
-lodash.assign@^4.2.0:
+lodash.assign@^4.0.3, lodash.assign@^4.0.6, lodash.assign@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7"
version "1.0.0"
resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306"
-lru-cache@2:
+lru-cache@2, lru-cache@^2.5.0:
version "2.7.3"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.7.3.tgz#6d4524e8b955f95d4f5b58851ce21dd72fb4e952"
for-in "^1.0.2"
is-extendable "^1.0.1"
-mkdirp@0.5.1, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0:
+mkdirp@0.5.1, mkdirp@0.5.x, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0:
version "0.5.1"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
dependencies:
version "1.4.0"
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
+natural@^0.2.0:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/natural/-/natural-0.2.1.tgz#1eb5156a9d90b4591949e20e94ebc77bb2339eda"
+ dependencies:
+ apparatus ">= 0.0.9"
+ sylvester ">= 0.0.12"
+ underscore ">=1.3.1"
+
negotiator@0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9"
version "1.0.0"
resolved "https://registry.yarnpkg.com/next-event/-/next-event-1.0.0.tgz#e7778acde2e55802e0ad1879c39cf6f75eda61d8"
+nib@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/nib/-/nib-1.1.2.tgz#6a69ede4081b95c0def8be024a4c8ae0c2cbb6c7"
+ dependencies:
+ stylus "0.54.5"
+
node-abi@^2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-2.1.2.tgz#4da6caceb6685fcd31e7dd1994ef6bb7d0a9c0b2"
tar "^2.2.1"
tar-pack "^3.4.0"
+node-redis-scripty@0.0.5:
+ version "0.0.5"
+ resolved "https://registry.yarnpkg.com/node-redis-scripty/-/node-redis-scripty-0.0.5.tgz#4bf2d365ab6dab202cc08b7ac63f8f55aadc9625"
+ dependencies:
+ extend "^1.2.1"
+ lru-cache "^2.5.0"
+
+node-redis-warlock@~0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/node-redis-warlock/-/node-redis-warlock-0.2.0.tgz#56395b994c828e8e32f6aae53b93b6edfcd97990"
+ dependencies:
+ node-redis-scripty "0.0.5"
+ uuid "^2.0.1"
+
node-sass@^4.0.0:
version "4.7.2"
resolved "https://registry.yarnpkg.com/node-sass/-/node-sass-4.7.2.tgz#9366778ba1469eb01438a9e8592f4262bcb6794e"
version "1.1.8"
resolved "https://registry.yarnpkg.com/progress/-/progress-1.1.8.tgz#e260c78f6161cdd9b0e56cc3e0a85de17c7a57be"
-promise@^7.1.1:
+promise@^7.0.1, promise@^7.1.1:
version "7.3.1"
resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf"
dependencies:
dependencies:
ps-tree "^1.1.0"
+pug-attrs@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-2.0.2.tgz#8be2b2225568ffa75d1b866982bff9f4111affcb"
+ dependencies:
+ constantinople "^3.0.1"
+ js-stringify "^1.0.1"
+ pug-runtime "^2.0.3"
+
+pug-code-gen@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-2.0.0.tgz#96aea39a9e62f1ec5d2b6a5b42a29d528c70b43d"
+ dependencies:
+ constantinople "^3.0.1"
+ doctypes "^1.1.0"
+ js-stringify "^1.0.1"
+ pug-attrs "^2.0.2"
+ pug-error "^1.3.2"
+ pug-runtime "^2.0.3"
+ void-elements "^2.0.1"
+ with "^5.0.0"
+
+pug-error@^1.3.2:
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-1.3.2.tgz#53ae7d9d29bb03cf564493a026109f54c47f5f26"
+
+pug-filters@^2.1.5:
+ version "2.1.5"
+ resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-2.1.5.tgz#66bf6e80d97fbef829bab0aa35eddff33fc964f3"
+ dependencies:
+ clean-css "^3.3.0"
+ constantinople "^3.0.1"
+ jstransformer "1.0.0"
+ pug-error "^1.3.2"
+ pug-walk "^1.1.5"
+ resolve "^1.1.6"
+ uglify-js "^2.6.1"
+
+pug-lexer@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-3.1.0.tgz#fd087376d4a675b4f59f8fef422883434e9581a2"
+ dependencies:
+ character-parser "^2.1.1"
+ is-expression "^3.0.0"
+ pug-error "^1.3.2"
+
+pug-linker@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-3.0.3.tgz#25f59eb750237f0368e59c3379764229c0189c41"
+ dependencies:
+ pug-error "^1.3.2"
+ pug-walk "^1.1.5"
+
+pug-load@^2.0.9:
+ version "2.0.9"
+ resolved "https://registry.yarnpkg.com/pug-load/-/pug-load-2.0.9.tgz#ee217c914cc1d9324d44b86c32d1df241d36de7a"
+ dependencies:
+ object-assign "^4.1.0"
+ pug-walk "^1.1.5"
+
+pug-parser@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-4.0.0.tgz#c9f52322e4eabe4bf5beeba64ed18373bb627801"
+ dependencies:
+ pug-error "^1.3.2"
+ token-stream "0.0.1"
+
+pug-runtime@^2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-2.0.3.tgz#98162607b0fce9e254d427f33987a5aee7168bda"
+
+pug-strip-comments@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-1.0.2.tgz#d313afa01bcc374980e1399e23ebf2eb9bdc8513"
+ dependencies:
+ pug-error "^1.3.2"
+
+pug-walk@^1.1.5:
+ version "1.1.5"
+ resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-1.1.5.tgz#90e943acbcf7021e6454cf1b32245891cba6f851"
+
+pug@^2.0.0-beta3:
+ version "2.0.0-rc.4"
+ resolved "https://registry.yarnpkg.com/pug/-/pug-2.0.0-rc.4.tgz#b7b08f6599bd5302568042b7436984fb28c80a13"
+ dependencies:
+ pug-code-gen "^2.0.0"
+ pug-filters "^2.1.5"
+ pug-lexer "^3.1.0"
+ pug-linker "^3.0.3"
+ pug-load "^2.0.9"
+ pug-parser "^4.0.0"
+ pug-runtime "^2.0.3"
+ pug-strip-comments "^1.0.2"
+
pump@^1.0.0, pump@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/pump/-/pump-1.0.3.tgz#5dfe8311c33bbf6fc18261f9f34702c47c08a954"
indent-string "^2.1.0"
strip-indent "^1.0.1"
+redis-commands@^1.2.0:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/redis-commands/-/redis-commands-1.3.1.tgz#81d826f45fa9c8b2011f4cd7a0fe597d241d442b"
+
+redis-parser@^2.0.0:
+ version "2.6.0"
+ resolved "https://registry.yarnpkg.com/redis-parser/-/redis-parser-2.6.0.tgz#52ed09dacac108f1a631c07e9b69941e7a19504b"
+
+redis@^0.12.1:
+ version "0.12.1"
+ resolved "https://registry.yarnpkg.com/redis/-/redis-0.12.1.tgz#64df76ad0fc8acebaebd2a0645e8a48fac49185e"
+
+redis@~2.6.0-2:
+ version "2.6.5"
+ resolved "https://registry.yarnpkg.com/redis/-/redis-2.6.5.tgz#87c1eff4a489f94b70871f3d08b6988f23a95687"
+ dependencies:
+ double-ended-queue "^2.1.0-0"
+ redis-commands "^1.2.0"
+ redis-parser "^2.0.0"
+
+reds@^0.2.5:
+ version "0.2.5"
+ resolved "https://registry.yarnpkg.com/reds/-/reds-0.2.5.tgz#38a767f7663cd749036848697d82c74fd29bc01f"
+ dependencies:
+ natural "^0.2.0"
+ redis "^0.12.1"
+
reflect-metadata@^0.1.10:
version "0.1.12"
resolved "https://registry.yarnpkg.com/reflect-metadata/-/reflect-metadata-0.1.12.tgz#311bf0c6b63cd782f228a81abe146a2bfa9c56f2"
scss-tokenizer "^0.2.3"
yargs "^7.0.0"
+sax@0.5.x:
+ version "0.5.8"
+ resolved "https://registry.yarnpkg.com/sax/-/sax-0.5.8.tgz#d472db228eb331c2506b0e8c15524adb939d12c1"
+
scripty@^1.5.0:
version "1.7.2"
resolved "https://registry.yarnpkg.com/scripty/-/scripty-1.7.2.tgz#92367b724cb77b086729691f7b01aa57f3ddd356"
version "0.4.0"
resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3"
+source-map@0.1.x:
+ version "0.1.43"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.1.43.tgz#c24bc146ca517c1471f5dacbe2571b2b7f9e3346"
+ dependencies:
+ amdefine ">=0.0.4"
+
source-map@0.4.x, source-map@^0.4.2, source-map@^0.4.4:
version "0.4.4"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b"
version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
+stylus@0.54.5, stylus@~0.54.5:
+ version "0.54.5"
+ resolved "https://registry.yarnpkg.com/stylus/-/stylus-0.54.5.tgz#42b9560931ca7090ce8515a798ba9e6aa3d6dc79"
+ dependencies:
+ css-parse "1.7.x"
+ debug "*"
+ glob "7.0.x"
+ mkdirp "0.5.x"
+ sax "0.5.x"
+ source-map "0.1.x"
+
superagent@^3.0.0, superagent@^3.6.3:
version "3.8.2"
resolved "https://registry.yarnpkg.com/superagent/-/superagent-3.8.2.tgz#e4a11b9d047f7d3efeb3bbe536d9ec0021d16403"
dependencies:
has-flag "^2.0.0"
+"sylvester@>= 0.0.12", "sylvester@>= 0.0.8":
+ version "0.0.21"
+ resolved "https://registry.yarnpkg.com/sylvester/-/sylvester-0.0.21.tgz#2987b1ce2bd2f38b0dce2a34388884bfa4400ea7"
+
sync-request@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/sync-request/-/sync-request-4.1.0.tgz#324b4e506fb994d2afd2a0021a455f800725f07a"
extend-shallow "^2.0.1"
regex-not "^1.0.0"
+token-stream@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-0.0.1.tgz#ceeefc717a76c4316f126d0b9dbaa55d7e7df01a"
+
toposort-class@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/toposort-class/-/toposort-class-1.0.1.tgz#7ffd1f78c8be28c3ba45cd4e1a3f5ee193bd9988"
version "2.6.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.6.2.tgz#3c5b6fd7f6de0914269027f03c0946758f7673a4"
-uglify-js@^2.6:
+uglify-js@^2.6, uglify-js@^2.6.1:
version "2.8.29"
resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd"
dependencies:
version "2.4.0"
resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-2.4.0.tgz#8cdd8fbac4e2d2ea1e7e2e8097c42f442280f85b"
-underscore@^1.7.0:
+underscore@>=1.3.1, underscore@^1.7.0:
version "1.8.3"
resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz#4f3fb53b106e6097fcf9cb4109f2a5e9bdfa5022"
version "1.0.1"
resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
+uuid@^2.0.1:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-2.0.3.tgz#67e2e863797215530dff318e5bf9dcebfd47b21a"
+
uuid@^3.0.0, uuid@^3.1.0, uuid@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.2.1.tgz#12c528bb9d58d0b9265d9a2f6f0fe8be17ff1f14"
pump "^1.0.1"
range-slice-stream "^1.2.0"
+void-elements@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec"
+
webfinger.js@^2.6.6:
version "2.6.6"
resolved "https://registry.yarnpkg.com/webfinger.js/-/webfinger.js-2.6.6.tgz#52ebdc85da8c8fb6beb690e8e32594c99d2ff4ae"
version "0.1.0"
resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d"
+window-size@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.2.0.tgz#b4315bb4214a3d7058ebeee892e13fa24d98b075"
+
winston-transport@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-3.0.1.tgz#8008b15eef5660c4fb3fa094d58ccbd08528c58d"
triple-beam "^1.0.1"
winston-transport "^3.0.1"
+with@^5.0.0:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/with/-/with-5.1.1.tgz#fa4daa92daf32c4ea94ed453c81f04686b575dfe"
+ dependencies:
+ acorn "^3.1.0"
+ acorn-globals "^3.0.0"
+
wkx@^0.4.1:
version "0.4.2"
resolved "https://registry.yarnpkg.com/wkx/-/wkx-0.4.2.tgz#776d35a634a5c22e656e4744bdeb54f83fd2ce8d"
version "3.0.2"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.2.tgz#8452b4bb7e83c7c188d8041c1a837c773d6d8bb9"
+yargs-parser@^2.4.1:
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-2.4.1.tgz#85568de3cf150ff49fa51825f03a8c880ddcc5c4"
+ dependencies:
+ camelcase "^3.0.0"
+ lodash.assign "^4.0.6"
+
yargs-parser@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-5.0.0.tgz#275ecf0d7ffe05c77e64e7c86e4cd94bf0e1228a"
dependencies:
camelcase "^4.1.0"
+yargs@^4.0.0:
+ version "4.8.1"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-4.8.1.tgz#c0c42924ca4aaa6b0e6da1739dfb216439f9ddc0"
+ dependencies:
+ cliui "^3.2.0"
+ decamelize "^1.1.1"
+ get-caller-file "^1.0.1"
+ lodash.assign "^4.0.3"
+ os-locale "^1.4.0"
+ read-pkg-up "^1.0.1"
+ require-directory "^2.1.1"
+ require-main-filename "^1.0.1"
+ set-blocking "^2.0.0"
+ string-width "^1.0.1"
+ which-module "^1.0.0"
+ window-size "^0.2.0"
+ y18n "^3.2.1"
+ yargs-parser "^2.4.1"
+
yargs@^7.0.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-7.1.0.tgz#6ba318eb16961727f5d284f8ea003e8d6154d0c8"