Skip to content

Commit e497009

Browse files
author
sachin-maheshwari
authored
Merge pull request #632 from topcoder-platform/dev
[PROD] Next Release
2 parents e09428d + 84384b9 commit e497009

File tree

81 files changed

+16680
-6161
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

81 files changed

+16680
-6161
lines changed

.vscode/launch.json

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
{
2+
// Use IntelliSense to learn about possible attributes.
3+
// Hover to view descriptions of existing attributes.
4+
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5+
"version": "0.2.0",
6+
"configurations": [
7+
{
8+
"console": "integratedTerminal",
9+
"internalConsoleOptions": "neverOpen",
10+
"name": "Nodemon TaaS API",
11+
"program": "${workspaceFolder}/app.js",
12+
"envFile": "${workspaceFolder}/.env",
13+
"request": "launch",
14+
"restart": true,
15+
"runtimeExecutable": "${workspaceFolder}/node_modules/.bin/nodemon",
16+
"skipFiles": [
17+
"<node_internals>/**"
18+
],
19+
"type": "pwa-node"
20+
},
21+
{
22+
"type": "pwa-node",
23+
"request": "launch",
24+
"name": "Launch TaaS API",
25+
"skipFiles": [
26+
"<node_internals>/**"
27+
],
28+
"program": "${workspaceFolder}/app.js",
29+
"envFile": "${workspaceFolder}/.env",
30+
"console": "integratedTerminal"
31+
},
32+
{
33+
"name": "Attach to dev:debug",
34+
"type": "node",
35+
"request": "attach",
36+
"restart": true,
37+
"port": 9229
38+
}
39+
]
40+
}

Makefile

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
#!/bin/bash
2+
GR=\033[92m>>>
3+
NC= <<<\033[0m
4+
.PHONY: dump_tables
5+
dump_tables:
6+
@echo "${GR}make sure you checked in all your changes${NC}"
7+
@echo "${GR}going to dev${NC}"
8+
git checkout dev
9+
npm run services:down
10+
npm run services:up
11+
npm run init-db
12+
npm run migrate
13+
@echo "${GR}cool, we are now migrated to dev status... moving on to our branch${NC}"
14+
git checkout feature/interview-nylas
15+
npm run migrate
16+
@echo "${GR}now we are post-feature migration state, let's dump the tables${NC}"
17+
@mkdir -p ./.comparisons/migrate
18+
@mkdir -p ./.comparisons/init-db
19+
@docker exec -t tc-taas-postgres pg_dump -h localhost --username=postgres -t 'bookings.interviews' --schema-only postgres > ./.comparisons/migrate/interviews.sql
20+
@docker exec -t tc-taas-postgres pg_dump -h localhost --username=postgres -t 'bookings.job_candidates' --schema-only postgres > ./.comparisons/migrate/job_candidates.sql
21+
@docker exec -t tc-taas-postgres pg_dump -h localhost --username=postgres -t 'bookings.user_meeting_settings' --schema-only postgres > ./.comparisons/migrate/user_meeting_settings.sql
22+
@echo "${GR}now we revert and simply `init-db force` from the feature branch${NC}"
23+
npm run services:down
24+
npm run services:up
25+
npm run init-db force
26+
@docker exec -t tc-taas-postgres pg_dump -h localhost --username=postgres -t 'bookings.interviews' --schema-only postgres > ./.comparisons/init-db/interviews.sql
27+
@docker exec -t tc-taas-postgres pg_dump -h localhost --username=postgres -t 'bookings.job_candidates' --schema-only postgres > ./.comparisons/init-db/job_candidates.sql
28+
@docker exec -t tc-taas-postgres pg_dump -h localhost --username=postgres -t 'bookings.user_meeting_settings' --schema-only postgres > ./.comparisons/init-db/user_meeting_settings.sql
29+
@echo "${GR}All done, you can now compare the files${NC}"
30+
git diff --no-index ./.comparisons/migrate/interviews.sql ./.comparisons/init-db/interviews.sql > ./.comparisons/interviews.diff || true
31+
git diff --no-index ./.comparisons/migrate/job_candidates.sql ./.comparisons/init-db/job_candidates.sql > ./.comparisons/job_candidates.diff || true
32+
git diff --no-index ./.comparisons/migrate/user_meeting_settings.sql ./.comparisons/init-db/user_meeting_settings.sql > ./.comparisons/user_meeting_settings.diff || true
33+
34+
35+
36+
.PHONY: reboot
37+
reboot:
38+
npm run services:down
39+
npm run services:up
40+
npm run init-db
41+
npm run migrate
42+
npm run local:init || true
43+
npm run test

README.md

Lines changed: 34 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -40,13 +40,15 @@
4040
AUTH0_AUDIENCE_UBAHN=
4141
AUTH0_CLIENT_ID=
4242
AUTH0_CLIENT_SECRET=
43-
# necessary if you'll utilize email functionality of interviews
44-
INTERVIEW_INVITATION_SENDGRID_TEMPLATE_ID=
45-
INTERVIEW_INVITATION_SENDER_EMAIL=
43+
# If you would like to test Interview Workflow then Config Nylas as per ./docs/guides/Setup-Interview-Workflow-Locally.md
44+
NYLAS_CLIENT_ID=
45+
NYLAS_CLIENT_SECRET=
46+
NYLAS_SCHEDULER_WEBHOOK_BASE_URL=
4647
# Locally deployed services (via docker-compose)
4748
ES_HOST=http://dockerhost:9200
4849
DATABASE_URL=postgres://postgres:postgres@dockerhost:5432/postgres
4950
BUSAPI_URL=http://dockerhost:8002/v5
51+
TAAS_API_BASE_URL=http://localhost:3000/api/v5
5052
# stripe
5153
STRIPE_SECRET_KEY=
5254
CURRENCY=usd
@@ -55,7 +57,7 @@
5557
- Values from this file would be automatically used by many `npm` commands.
5658
- ⚠️ Never commit this file or its copy to the repository!
5759

58-
1. Set `dockerhost` to point the IP address of Docker. Docker IP address depends on your system. For example if docker is run on IP `127.0.0.1` add a the next line to your `/etc/hosts` file:
60+
2. Set `dockerhost` to point to the IP address of Docker. The Docker IP address depends on your system. For example, if Docker runs on IP `127.0.0.1`, add the next line to your `/etc/hosts` file:
5961

6062
```
6163
127.0.0.1 dockerhost
@@ -170,6 +172,8 @@
170172
Runs the Topcoder TaaS API using nodemon, so it would be restarted after any of the files is updated.
171173
The Topcoder TaaS API will be served on `http://localhost:3000`.
172174

175+
- 💡 If you would like to test Interview Workflow locally, then follow the guide [How to Setup Interview Workflow Locally](./docs/guides/Setup-Interview-Workflow-Locally.md).
176+
173177
### Working on `taas-es-processor` locally
174178

175179
When you run `taas-apis` locally as per "[Steps to run locally](#steps-to-run-locally)" the [taas-es-processor](https://github.com/topcoder-platform/taas-es-processor) would be run for you automatically together with other services inside the docker container via `npm run services:up`.
@@ -224,7 +228,7 @@ To be able to change and test `taas-es-processor` locally you can follow the nex
224228
| `npm run cov` | Code Coverage Report. |
225229
| `npm run migrate` | Run any migration files which haven't run yet. |
226230
| `npm run migrate:undo` | Revert most recent migration. |
227-
| `npm run demo-payment-scheduler` | Create 1000 Work Periods Payment records in with status "scheduled" and various "amount" |
231+
| `npm run demo-email-notifications` | Listen to the Kafka events of email notification and render all the emails into `./out` folder. See [its readme](scripts/demo-email-notifications/README.md) for details. |
228232
| `npm run emsi-mapping` | mapping EMSI tags to topcoder skills |
229233
230234
## Import and Export data
@@ -294,6 +298,29 @@ docker exec -it tc-taas-kafka /opt/kafka/bin/kafka-console-producer.sh --broker-
294298

295299
- Enter or copy/paste the message into the console after starting this command.
296300

301+
## Email Notifications
302+
303+
We have various email notifications. For example many emails are sent to support Interview Scheduling Workflow. All email templates are placed inside the [.data/notification-email-templates](./data/notification-email-templates/) folder.
304+
305+
### Add a new Email Notification
306+
307+
To add a new email notification:
308+
309+
0. Each email notification needs to have a unique topic identifier of the shape `taas.notification.{notification-type}`. Where `{notification-type}` is unique for each email notification type.
310+
1. Create a new HTML template inside folder [.data/notification-email-templates](./data/notification-email-templates/). (You may duplicate any existent template to reuse existent styles.). Name it the same as topic: `taas.notification.{notification-type}.html`.
311+
2. Create a corresponding config in file [./config/email_template.config.js](./config/email_template.config.js), section `notificationEmailTemplates`.
312+
3. Name environment variable the same as topic, but **uppercase** and replace all special symbols to `_` and add suffix `_SENDGRID_TEMPLATE_ID`:
313+
- For example topic `taas.notification.job-candidate-resume-viewed` would have corresponding environment variable
314+
`TAAS_NOTIFICATION_JOB_CANDIDATE_RESUME_VIEWED_SENDGRID_TEMPLATE_ID`.
315+
4. When deploying to DEV/PROD, someone would have to create a new Sendgrid template, fill in the subject and email HTML template inside the Sendgrid UI by copy/pasting the HTML file from the repo, and then set the environment variable with the value of the template ID provided by Sendgrid.
316+
317+
### Test/Render Email Notifications Locally
318+
319+
To test and render email notifications locally, run a special script `npm run demo-email-notifications`. Before running it, follow its [README](./scripts/demo-email-notifications/README.md) as you would have to set some additional environment variables first (add them into `.env` file).
320+
321+
- This script first would update demo data to create some situations to trigger notifications.
322+
- And it would listen to Kafka events and render email notification into `./out` folder.
323+
297324
## DB Migration
298325
299326
- `npm run migrate`: run any migration files which haven't run yet.
@@ -303,10 +330,7 @@ Configuration for migration is at `./config/config.json`.
303330

304331
The following parameters can be set in the config file or via env variables:
305332

306-
- `username`: set via env `DB_USERNAME`; datebase username
307-
- `password`: set via env `DB_PASSWORD`; datebase password
308-
- `database`: set via env `DB_NAME`; datebase name
309-
- `host`: set via env `DB_HOST`; datebase host name
333+
- `url`: set via env `DATABASE_URL`; database URL
310334

311335
## Testing
312336

@@ -341,6 +365,7 @@ When we add, update or delete models and/or endpoints we have to make sure that
341365
- Test, that when we migrate DB from the previous state using `npm run migrate`, we get exactly the same DB schema as if we create DB from scratch using command `npm run init-db force`.
342366

343367
## EMSI mapping
368+
344369
mapping EMSI tags to topcoder skills
345370
Run `npm run emsi-mapping` to create the mapping file
346371
It will take about 15 minutes to create the mapping file `script/emsi-mapping/emsi-skils-mapping.js`

app-constants.js

Lines changed: 42 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,13 @@ const Scopes = {
5858
CREATE_ROLE: 'create:taas-roles',
5959
UPDATE_ROLE: 'update:taas-roles',
6060
DELETE_ROLE: 'delete:taas-roles',
61-
ALL_ROLE: 'all:taas-roles'
61+
ALL_ROLE: 'all:taas-roles',
62+
// userMeetingSettings
63+
READ_USER_MEETING_SETTINGS: 'read:taas-userMeetingsSettings',
64+
CREATE_USER_MEETING_SETTINGS: 'create:taas-userMeetingsSettings',
65+
UPDATE_USER_MEETING_SETTINGS: 'update:taas-userMeetingsSettings',
66+
ALL_USER_MEETING_SETTINGS: 'all:taas-userMeetingsSettings'
67+
6268
}
6369

6470
// Interview related constants
@@ -69,14 +75,22 @@ const Interviews = {
6975
RequestedForReschedule: 'Requested for reschedule',
7076
Rescheduled: 'Rescheduled',
7177
Completed: 'Completed',
72-
Cancelled: 'Cancelled'
73-
},
74-
// key: template name in x.ai, value: duration
75-
XaiTemplate: {
76-
'interview-30': 30,
77-
'interview-60': 60
78+
Cancelled: 'Cancelled',
79+
Expired: 'Expired'
7880
},
79-
MaxAllowedCount: 3
81+
MaxAllowedCount: 3,
82+
Nylas: {
83+
Days: {
84+
Monday: 'M',
85+
Tuesday: 'T',
86+
Wednesday: 'W',
87+
Thursday: 'R',
88+
Friday: 'F',
89+
Saturday: 'S',
90+
Sunday: 'U'
91+
},
92+
StartEndRegex: /^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$/
93+
}
8094
}
8195

8296
const ChallengeStatus = {
@@ -166,7 +180,23 @@ const JobCandidateStatus = {
166180
INTERVIEW: 'interview'
167181
}
168182

183+
const SearchUsers = {
184+
SEARCH_USERS_PAGE_SIZE: 5
185+
}
186+
187+
// provider which we have to enforce for Nylas Virtual Calendars
188+
const NylasVirtualCalendarProvider = 'nylas'
189+
190+
const ZoomLinkType = {
191+
HOST: 'host',
192+
GUEST: 'guest'
193+
}
194+
195+
// how long to wait for the Interview Webhook Mutes to release (ms)
196+
const InterviewEventHandlerTimeout = 60 * 1000 // 60 seconds
197+
169198
module.exports = {
199+
InterviewEventHandlerTimeout,
170200
UserRoles,
171201
FullManagePermissionRoles,
172202
Scopes,
@@ -181,5 +211,8 @@ module.exports = {
181211
WeeklySurveySwitch,
182212
ActiveWorkPeriodPaymentStatuses,
183213
JobStatus,
184-
JobCandidateStatus
214+
JobCandidateStatus,
215+
SearchUsers,
216+
NylasVirtualCalendarProvider,
217+
ZoomLinkType
185218
}

app.js

Lines changed: 30 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,32 @@ app.use(cors({
2525
// Allow browsers access pagination data in headers
2626
exposedHeaders: ['X-Page', 'X-Per-Page', 'X-Total', 'X-Total-Pages', 'X-Prev-Page', 'X-Next-Page']
2727
}))
28-
app.use(express.json())
28+
app.use((...args) => {
29+
const [req, res, next] = args
30+
// For the Nylas webhook verification, we need the raw request buffer
31+
// Here is custom middleware to compute rawBody. Unfortunately using
32+
// JSON.stringify(req.body) will remove spaces and newlines, so verification
33+
// will fail. We must add this middleware to ensure we're computing the correct
34+
// signature
35+
if (req.path.match(/\/taas-teams\/nylas-webhooks/i)) {
36+
req.rawBody = ''
37+
req.on('data', (chunk) => (req.rawBody += chunk))
38+
req.on('error', () => res.status(500).send('Error parsing body'))
39+
40+
req.on('end', () => {
41+
// because the stream has been consumed, other parsers like bodyParser.json
42+
// cannot stream the request data and will time out so we must explicitly parse the body
43+
try {
44+
req.body = req.rawBody.length ? JSON.parse(req.rawBody) : {}
45+
next()
46+
} catch (err) {
47+
res.status(500).send('Error parsing body')
48+
}
49+
})
50+
return
51+
}
52+
return express.json()(...args)
53+
})
2954
app.use(express.urlencoded({ extended: true }))
3055
app.set('port', config.PORT)
3156

@@ -100,7 +125,7 @@ const server = app.listen(app.get('port'), () => {
100125
logger.info({ component: 'app', message: `Express server listening on port ${app.get('port')}` })
101126
eventHandlers.init()
102127
// schedule updateCompletedInterviews to run every hour
103-
schedule.scheduleJob('0 0 * * * *', interviewService.updateCompletedInterviews)
128+
schedule.scheduleJob(config.CRON_UPDATE_COMPLETED_INTERVIEWS, interviewService.updateCompletedInterviews)
104129
// schedule sendSurveys
105130
if (WeeklySurveySwitch.ON === config.WEEKLY_SURVEY.SWITCH) {
106131
schedule.scheduleJob(config.WEEKLY_SURVEY.CRON, sendSurveys)
@@ -113,6 +138,9 @@ const server = app.listen(app.get('port'), () => {
113138
schedule.scheduleJob(config.CRON_INTERVIEW_COMPLETED, notificationSchedulerService.sendInterviewCompletedNotifications)
114139
schedule.scheduleJob(config.CRON_POST_INTERVIEW, notificationSchedulerService.sendPostInterviewActionNotifications)
115140
schedule.scheduleJob(config.CRON_UPCOMING_RESOURCE_BOOKING, notificationSchedulerService.sendResourceBookingExpirationNotifications)
141+
142+
schedule.scheduleJob(config.CRON_INTERVIEW_EXPIRED, notificationSchedulerService.sendInterviewExpiredNotifications)
143+
schedule.scheduleJob(config.CRON_INTERVIEW_SCHEDULE_REMINDER, notificationSchedulerService.sendInterviewScheduleReminderNotifications)
116144
})
117145

118146
if (process.env.NODE_ENV === 'test') {

config/config.js

Lines changed: 3 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -4,24 +4,12 @@
44

55
module.exports = {
66
development: {
7-
username: process.env.DB_USERNAME || 'postgres',
8-
password: process.env.DB_PASSWORD || 'postgres',
9-
database: process.env.DB_NAME || 'postgres',
10-
host: process.env.DB_HOST || '127.0.0.1',
11-
dialect: 'postgres'
7+
url: process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5432/postgres'
128
},
139
test: {
14-
username: process.env.DB_USERNAME || 'postgres',
15-
password: process.env.DB_PASSWORD || 'postgres',
16-
database: process.env.DB_NAME || 'postgres',
17-
host: process.env.DB_HOST || '127.0.0.1',
18-
dialect: 'postgres'
10+
url: process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5432/postgres'
1911
},
2012
production: {
21-
username: process.env.DB_USERNAME,
22-
password: process.env.DB_PASSWORD,
23-
database: process.env.DB_NAME,
24-
host: process.env.DB_HOST,
25-
dialect: 'postgres'
13+
url: process.env.DATABASE_URL
2614
}
2715
}

0 commit comments

Comments
 (0)