Initial commit

This commit is contained in:
Jeremy Dorn
2021-05-07 18:13:37 -05:00
commit 4f8dba93e8
359 changed files with 50568 additions and 0 deletions

5
.eslintignore Normal file
View File

@@ -0,0 +1,5 @@
# build artifacts
dist
coverage
# data definition files
**/*.d.ts

53
.eslintrc Normal file
View File

@@ -0,0 +1,53 @@
{
"env": {
"node": true,
"browser": true,
"es6": true
},
"settings": {
"react": {
"version": "detect"
}
},
"parser": "@typescript-eslint/parser",
"plugins": [
"react",
"@typescript-eslint",
"prettier",
"@next/eslint-plugin-next",
],
"extends": [
"eslint:recommended",
"plugin:react/recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"plugin:prettier/recommended",
"plugin:@next/eslint-plugin-next/recommended"
],
"parserOptions": {
"ecmaFeatures": {
"jsx": true
},
"ecmaVersion": 2018,
"sourceType": "module"
},
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"rules": {
"@next/next/no-html-link-for-pages": ["warn", "./packages/front-end/pages"],
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/explicit-function-return-type": "off",
"@typescript-eslint/no-explicit-any": 1,
"@typescript-eslint/no-inferrable-types": [
"warn",
{
"ignoreParameters": true
}
],
"@typescript-eslint/no-unused-vars": "warn",
"react/react-in-jsx-scope": "off",
"react/prop-types": "off"
}
}

91
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,91 @@
name: CI
on: [push]
jobs:
  ci:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 2
      - name: Use Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: Get yarn cache directory
        id: yarn-cache
        run: |
          echo "::set-output name=dir::$(yarn cache dir)"
      - uses: actions/cache@v1
        with:
          path: ${{ steps.yarn-cache.outputs.dir }}
          key: ${{ runner.os }}-node-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-node-
      - name: install dependencies
        run: |
          yarn
        env:
          CI: true
      - name: lint, typecheck
        run: |
          yarn lint
          yarn type-check
      - name: test
        run: |
          cp .env.example .env.local
          yarn test
          rm .env.local
          rm -rf coverage
  changes:
    runs-on: ubuntu-latest
    needs: ci
    # Expose the change-detection result so the back-end job can gate on it.
    # Without this outputs mapping, needs.changes.outputs.backend is always empty.
    outputs:
      backend: ${{ steps.backend-changes.outputs.backend }}
    steps:
      # Checkout is required before running git commands.
      # fetch-depth 2 guarantees HEAD^ exists for the diff below.
      - uses: actions/checkout@v2
        with:
          fetch-depth: 2
      - name: has back-end changes
        id: backend-changes
        run: |
          # See if any relevant back-end changes were made.
          # "|| true" keeps the step green when grep finds no matches
          # (grep exits 1 on no match, which fails under the default "bash -e" shell)
          FILE_CHANGES=$(git diff --name-only HEAD^ HEAD)
          YARN_LOCK_CHANGES=$(echo "$FILE_CHANGES" | grep yarn.lock || true)
          BACK_END_CHANGES=$(echo "$FILE_CHANGES" | grep packages/back-end || true)
          HAS_BACKEND_CHANGES=$(if [ -z "${BACK_END_CHANGES}${YARN_LOCK_CHANGES}" ]; then echo "false"; else echo "true"; fi)
          echo "::set-output name=backend::${HAS_BACKEND_CHANGES}"
  back-end:
    runs-on: ubuntu-latest
    needs: [ci, changes]
    if: github.ref == 'refs/heads/main' && needs.changes.outputs.backend == 'true'
    steps:
      # The repo contents are needed to build the docker image
      - uses: actions/checkout@v2
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v1
      - name: Build, tag, and push image to Amazon ECR
        env:
          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
          ECR_REPOSITORY: api
          IMAGE_TAG: ${{ github.sha }}
        run: |
          # Move into the back-end package and add the root yarn.lock
          # (fixed path typo: "packges" -> "packages")
          cp yarn.lock packages/back-end/yarn.lock
          cd packages/back-end
          # Build and push the docker image (push each tag explicitly)
          docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:latest -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG .
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:latest
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
          # Cleanup
          rm yarn.lock
      - name: Deploy to ECS
        run:
          aws ecs update-service --cluster prod-api --service prod-api --force-new-deployment --region us-east-1

12
.gitignore vendored Normal file
View File

@@ -0,0 +1,12 @@
coverage
node_modules
*.log
.idea
.env.local
dist
.next
out
.DS_Store
npm-debug.log*
yarn-debug.log*
yarn-error.log*

4
.prettierignore Normal file
View File

@@ -0,0 +1,4 @@
.next
node_modules
public
coverage

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 Growth Book, LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

94
README.md Normal file
View File

@@ -0,0 +1,94 @@
<p align="center"><img src="https://www.growthbook.io/logos/growthbook-logo@2x.png" width="400px" /></p>
# Growth Book - The Open Source A/B Testing Platform
This repo is the actual Growth Book application where you connect to your data sources, define metrics, and analyze experiment results.
In addition, there are **client libraries** to help you implement A/B tests in [React](https://github.com/growthbook/growthbook-react), [Javascript](https://github.com/growthbook/growthbook-js), [PHP](https://github.com/growthbook/growthbook-php), and [Ruby](https://github.com/growthbook/growthbook-ruby) with more coming soon.
## Major Features
- Queries multiple data sources (Snowflake, Redshift, BigQuery, Mixpanel, Postgres, Athena, and Google Analytics)
- Bayesian statistics engine with support for binomial, count, duration, and revenue metrics
- Drill down into A/B test results (e.g. by browser, country, etc.)
- Lightweight idea board and prioritization framework
- Document everything! (upload screenshots, add markdown comments, and more)
- Automated email alerts when tests become significant
## Community
Join [our Growth Book Users Slack community](https://join.slack.com/t/growthbookusers/shared_invite/zt-oiq9s1qd-dHHvw4xjpnoRV1QQrq6vUg) if you need help, want to chat, or are thinking of a new feature. We're here to help - and to make Growth Book even better.
## Requirements
- NodeJS 12.x or higher (https://nodejs.org/en/)
- Yarn (`sudo npm install -g yarn`)
- MongoDB 3.2 or higher
- A compatible data source (Snowflake, Redshift, BigQuery, Mixpanel, Postgres, Athena, or Google Analytics)
- AWS S3 bucket and access keys that allow writing (for image/file uploads)
- An email provider (Sendgrid, Mailgun, etc.) for sending invites, forgot password emails, etc.
- Google OAuth keys (only if using Google Analytics as a data source)
Don't want to install, deploy, and maintain Growth Book on your own? Let us do it for you at https://www.growthbook.io
## Setup
```sh
# Install dependencies
yarn
# Create .env.local files for the front-end and back-end
yarn init:dev
```
Edit the default values in `packages/back-end/.env.local` and `packages/front-end/.env.local` as needed.
### MongoDB
To quickly get a local MongoDB instance running for development, you can use docker:
```sh
docker run -d --name mongo \
-e MONGO_INITDB_ROOT_USERNAME=root \
-e MONGO_INITDB_ROOT_PASSWORD=password \
mongo
```
For production, we recommend using MongoDB Atlas or another fully managed service.
The Growth Book app only stores meta info and aggregate stats, so the size of MongoDB should stay comfortably within the free tier for most deployments.
### Email
Growth Book sends a few transactional emails (team member invites, forgot password, etc.).
You can configure the email server using environment variables. Here's an example for Sendgrid:
```
EMAIL_ENABLED=true
EMAIL_HOST=smtp.sendgrid.net
EMAIL_PORT=465
EMAIL_HOST_USER=apikey
EMAIL_HOST_PASSWORD=SG.123abc
EMAIL_USE_TLS=true
EMAIL_FROM=noreply@example.com
# Site Manager is alerted when a new organization is created
SITE_MANAGER_EMAIL=admin@example.com
```
## Usage
This is a monorepo with 2 packages - `back-end` and `front-end`. For simplicity, we've added helper scripts at the top level:
- `yarn dev` - Start dev servers with hot reloading. Back-End is at http://localhost:3100 and the Front-End is at http://localhost:3000.
- `yarn lint` - Run eslint and auto-fix errors when possible
- `yarn pretty` - Run prettier across the entire codebase
- `yarn type-check` - Check for typescript compile errors
- `yarn test` - Run the test suites
### Production
- `yarn build` - Build the production bundles (outputs to `dist/` in each package directory)
- `yarn start` - Serve the production bundles (same ports as `yarn dev`)
## License
This project uses the MIT license. The core Growth Book app will always remain free, although we may add some commercial enterprise add-ons in the future.

58
package.json Normal file
View File

@@ -0,0 +1,58 @@
{
"name": "growthbook-app",
"repository": "https://github.com/growthbook/growthbook-app.git",
"private": true,
"scripts": {
"lint": "eslint './**/*.{ts,tsx,js,jsx}' --fix --max-warnings 0",
"pretty": "prettier --write ./**/*.{json,css,scss,md}",
"type-check": "wsrun type-check",
"test": "wsrun test",
"dev": "wsrun dev",
"build": "wsrun build",
"start": "wsrun start",
"init:dev": "wsrun init:dev"
},
"workspaces": [
"packages/*"
],
"dependencies": {},
"devDependencies": {
"@next/eslint-plugin-next": "^10.2.0",
"@types/chai": "^4.2.3",
"@types/concurrently": "^4.1.0",
"@types/eslint": "^6.1.1",
"@types/jest": "^24.0.23",
"@typescript-eslint/eslint-plugin": "^4.22.1",
"@typescript-eslint/parser": "^4.22.1",
"chai": "^4.2.0",
"concurrently": "^5.0.0",
"eslint": "^7.1.0",
"eslint-config-prettier": "^6.11.0",
"eslint-plugin-prettier": "^3.1.3",
"eslint-plugin-react": "^7.20.0",
"husky": "^4.2.5",
"jest": "^26.6.3",
"lint-staged": "^10.2.7",
"nodemon": "^1.19.2",
"prettier": "^2.2.1",
"supertest": "^4.0.2",
"ts-jest": "^26.5.6",
"ts-node": "^9.1.1",
"typescript": "^4.2.4",
"wsrun": "^5.2.4"
},
"husky": {
"hooks": {
"pre-commit": "yarn type-check && lint-staged"
}
},
"lint-staged": {
"./**/*.{json,css,scss,md}": [
"prettier --write"
],
"./**/*.{js,ts}": [
"yarn lint"
]
},
"license": "MIT"
}

View File

@@ -0,0 +1,41 @@
###################################
######## Required Settings ########
###################################
# Auth signing key (use a long random string)
JWT_SECRET=
# Data source credential encryption key (use a long random string)
ENCRYPTION_KEY=
# S3 (screenshot/file uploads)
S3_BUCKET=
S3_REGION=us-east-1
# CORS configuration
APP_ORIGIN=http://localhost:3000
# MongoDB
MONGODB_URI=mongodb://root:password@localhost:27017/
###################################
######## Optional Settings ########
###################################
# Override the default S3 domain (https://${S3_BUCKET}.s3.amazonaws.com/)
S3_DOMAIN=
# Required to send emails from the app
EMAIL_ENABLED=true
EMAIL_HOST=
EMAIL_PORT=587
EMAIL_HOST_USER=
EMAIL_HOST_PASSWORD=
EMAIL_USE_TLS=true
EMAIL_FROM=
# Site Manager is alerted when a new member joins the organization
SITE_MANAGER_EMAIL=
# Required if using Google Analytics as a data source
GOOGLE_OAUTH_CLIENT_ID=
GOOGLE_OAUTH_CLIENT_SECRET=

View File

@@ -0,0 +1,12 @@
# Production image for the back-end API.
# Pinned to Node 12 to match the version used in CI (node-version: 12.x)
# and the minimum documented in the README; an unpinned "node:alpine"
# would silently float to new major versions.
FROM node:12-alpine

WORKDIR /usr/local/src/app

# The app is compiled outside the image ("yarn build"); only the built
# output and the dependency manifests are copied in.
COPY ./dist /usr/local/src/app/dist
COPY ./package.json /usr/local/src/app/package.json
COPY ./yarn.lock /usr/local/src/app/yarn.lock

# Install production dependencies only, reproducibly from the lockfile,
# then drop the yarn cache to keep the image small.
RUN yarn install --frozen-lockfile --production=true --ignore-optional \
  && rm -rf /usr/local/share/.cache/yarn

CMD ["yarn","start"]

View File

@@ -0,0 +1,13 @@
// Jest configuration for the TypeScript back-end test suite.
module.exports = {
  globals: {
    // Point ts-jest at the package's TypeScript compiler settings
    "ts-jest": {
      tsconfig: "tsconfig.json",
    },
  },
  // Resolve both TypeScript sources and plain JS test helpers
  moduleFileExtensions: ["ts", "js"],
  transform: {
    // Compile .ts/.tsx files on the fly with ts-jest
    "^.+\\.(ts|tsx)$": "ts-jest",
  },
  // Only files under a test/ directory named *.test.ts or *.test.js are run
  testMatch: ["**/test/**/*.test.(ts|js)"],
  // Server-side code: use the node environment, not jsdom
  testEnvironment: "node",
};

View File

@@ -0,0 +1,85 @@
{
"name": "back-end",
"version": "0.0.1",
"private": true,
"scripts": {
"copy-email-templates": "mkdir -p dist/templates && cp -r src/templates/* dist/templates",
"dev": "yarn copy-email-templates && ts-node-dev src/server.ts",
"build": "tsc && yarn copy-email-templates",
"start": "node dist/server.js",
"test": "jest --forceExit --coverage --verbose --detectOpenHandles",
"type-check": "tsc --pretty --noEmit",
"generate-dummy-data": "node --stack-size=8192 ./test/data-generator/data-generator.js",
"import-dummy-data": "node --stack-size=8192 ./test/data-generator/import.js",
"init:dev": "cp -n .env.example .env.local"
},
"dependencies": {
"@google-cloud/bigquery": "^5.5.0",
"@slack/web-api": "^5.14.0",
"ab-designer": "^0.4.0",
"asn1.js": "^5.4.1",
"async": "^3.1.0",
"aws-sdk": "^2.696.0",
"body-parser": "^1.19.0",
"connect-mongo": "^3.0.0",
"cookie-parser": "^1.4.5",
"cors": "^2.8.5",
"crypto-js": "^4.0.0",
"date-fns": "^2.15.0",
"dotenv": "^8.2.0",
"errorhandler": "^1.5.1",
"express": "^4.17.1",
"express-async-handler": "^1.1.4",
"express-jwt": "^5.3.3",
"express-validator": "^6.2.0",
"googleapis": "^59.0.0",
"jsonwebtoken": "^8.5.1",
"jstat": "^1.9.3",
"jwks-rsa": "^1.8.0",
"lodash": "^4.17.15",
"md5": "^2.2.1",
"mongoose": "^5.7.5",
"node-fetch": "^2.6.1",
"node-gzip": "^1.1.2",
"nodemailer": "^6.6.0",
"nunjucks": "^3.2.3",
"objects-to-csv": "^1.3.6",
"pg": "^8.3.0",
"pino-http": "^5.3.0",
"snowflake-promise": "^4.1.0",
"sql-formatter": "^2.3.3",
"stripe": "^8.106.0",
"uniqid": "^5.2.0",
"winston": "^3.3.3"
},
"devDependencies": {
"@types/async": "^3.0.2",
"@types/body-parser": "^1.17.1",
"@types/concurrently": "^4.1.0",
"@types/cookie-parser": "^1.4.2",
"@types/cors": "^2.8.6",
"@types/crypto-js": "^3.1.47",
"@types/errorhandler": "^0.0.32",
"@types/express": "^4.17.1",
"@types/express-jwt": "^0.0.42",
"@types/jsonwebtoken": "^8.5.1",
"@types/lodash": "^4.14.157",
"@types/md5": "^2.2.0",
"@types/node": "^12.7.8",
"@types/node-fetch": "^2.5.8",
"@types/node-gzip": "^1.1.0",
"@types/nodemailer": "^6.4.1",
"@types/nunjucks": "^3.1.4",
"@types/pg": "^7.14.4",
"@types/pino-http": "^5.0.5",
"@types/request": "^2.48.3",
"@types/request-promise": "^4.1.44",
"@types/sql-formatter": "^2.3.0",
"@types/supertest": "^2.0.9",
"@types/uniqid": "^5.2.0",
"mongodb": "^3.6.3",
"ts-node-dev": "^1.1.6",
"typescript": "^4.2.4"
},
"license": "MIT"
}

View File

@@ -0,0 +1,412 @@
import bodyParser from "body-parser";
import cookieParser from "cookie-parser";
import express, { RequestHandler, ErrorRequestHandler } from "express";
import "./util/logger";
import mongoInit from "./init/mongo";
import cors from "cors";
import { AuthRequest } from "./types/AuthRequest";
import { APP_ORIGIN, CORS_ORIGIN_REGEX, IS_CLOUD } from "./util/secrets";
import {
getExperimentConfig,
getVisualDesignerScript,
} from "./controllers/config";
import asyncHandler from "express-async-handler";
import pino from "pino-http";
import { verifySlackRequestSignature } from "./services/slack";
import { getJWTCheck, processJWT } from "./services/auth";
// Controllers
import * as authController from "./controllers/auth";
import * as organizationsController from "./controllers/organizations";
import * as experimentsController from "./controllers/experiments";
import * as learningsController from "./controllers/learnings";
import * as ideasController from "./controllers/ideas";
import * as presentationController from "./controllers/presentations";
import * as discussionsController from "./controllers/discussions";
import * as adminController from "./controllers/admin";
import * as stripeController from "./controllers/stripe";
import * as segmentsController from "./controllers/segments";
import * as dimensionsController from "./controllers/dimensions";
import * as slackController from "./controllers/slack";
// Wrap every exported controller function in asyncHandler so that
// rejected promises are forwarded to the Express error handler instead
// of being swallowed. Mutates the controller module object in place.
function wrapController(controller: Record<string, RequestHandler>): void {
  for (const name of Object.keys(controller)) {
    const handler = controller[name];
    if (typeof handler === "function") {
      controller[name] = asyncHandler(handler);
    }
  }
}
wrapController(authController);
wrapController(organizationsController);
wrapController(experimentsController);
wrapController(learningsController);
wrapController(ideasController);
wrapController(presentationController);
wrapController(discussionsController);
wrapController(adminController);
wrapController(stripeController);
wrapController(segmentsController);
wrapController(dimensionsController);
wrapController(slackController);
const app = express();
app.set("port", process.env.PORT || 3100);
app.use(cookieParser());
// Health check route (does not require JWT or cors)
app.get("/healthcheck", (req, res) => {
// TODO: more robust health check?
res.status(200).json({
status: 200,
healthy: true,
});
});
// Visual Designer js file (does not require JWT or cors)
app.get("/visual-designer.js", getVisualDesignerScript);
const loggerRedact = {
paths: [
"req.headers.authorization",
'req.headers["if-none-match"]',
'req.headers["cache-control"]',
'req.headers["upgrade-insecure-requests"]',
"req.headers.cookie",
"req.headers.connection",
'req.headers["accept"]',
'req.headers["accept-encoding"]',
'req.headers["accept-language"]',
'req.headers["sec-fetch-site"]',
'req.headers["sec-fetch-mode"]',
'req.headers["sec-fetch-dest"]',
"res.headers.etag",
'res.headers["x-powered-by"]',
'res.headers["access-control-allow-credentials"]',
'res.headers["access-control-allow-origin"]',
],
remove: true,
};
const preAuthLogger = pino({
redact: loggerRedact,
});
// Stripe webhook (needs raw body)
app.post(
"/stripe/webhook",
bodyParser.raw({
type: "application/json",
}),
preAuthLogger,
stripeController.postWebhook
);
// Slack app (body is urlencoded)
app.post(
"/ideas/slack",
bodyParser.urlencoded({
extended: true,
verify: verifySlackRequestSignature,
}),
preAuthLogger,
slackController.postIdeas
);
app.use(bodyParser.json());
// Initialize async app dependencies. Must be awaited before the server
// starts accepting requests.
export async function init() {
  // Open the MongoDB connection (see ./init/mongo); models are unusable
  // until this resolves
  await mongoInit();
}
// Config route (does not require JWT, does require cors with origin = *)
app.get(
"/config/:key",
cors({
credentials: false,
origin: "*",
}),
preAuthLogger,
getExperimentConfig
);
// Accept cross-origin requests from the frontend app
const origins: (string | RegExp)[] = [APP_ORIGIN];
if (CORS_ORIGIN_REGEX) {
origins.push(CORS_ORIGIN_REGEX);
}
app.use(
cors({
credentials: true,
origin: origins,
})
);
// Pre-auth requests
// Managed cloud deployment uses Auth0 instead
if (!IS_CLOUD) {
app.post("/auth/refresh", preAuthLogger, authController.postRefresh);
app.post("/auth/login", preAuthLogger, authController.postLogin);
app.post("/auth/logout", preAuthLogger, authController.postLogout);
app.post("/auth/register", preAuthLogger, authController.postRegister);
app.post("/auth/forgot", preAuthLogger, authController.postForgotPassword);
app.get("/auth/reset/:token", preAuthLogger, authController.getResetPassword);
app.post(
"/auth/reset/:token",
preAuthLogger,
authController.postResetPassword
);
}
// All other routes require a valid JWT
app.use(getJWTCheck());
// Add logged in user props to the request
app.use(processJWT);
const logger = pino({
autoLogging: process.env.NODE_ENV === "production",
redact: loggerRedact,
reqCustomProps: (req: AuthRequest) => ({
userId: req.userId,
admin: !!req.admin,
}),
});
app.use(logger);
// Event Tracking
//app.get("/events", eventsController.getEvents);
//app.post("/events/sync", eventsController.postEventsSync);
// Logged-in auth requests
// Managed cloud deployment uses Auth0 instead
if (!IS_CLOUD) {
app.post("/auth/change-password", authController.postChangePassword);
}
// Organizations
app.get("/user", organizationsController.getUser);
// Every other route requires a userId to be set
app.use(
asyncHandler(async (req: AuthRequest, res, next) => {
if (!req.userId) {
throw new Error("Must be authenticated. Try refreshing the page.");
}
next();
})
);
// Organization and Settings
app.put("/user/name", organizationsController.putUserName);
app.get("/user/watching", organizationsController.getWatchedExperiments);
app.get("/activity", organizationsController.getActivityFeed);
app.get("/history/:type/:id", organizationsController.getHistory);
app.get("/organization", organizationsController.getOrganization);
app.post("/organization", organizationsController.signup);
app.put("/organization", organizationsController.putOrganization);
app.post("/invite/accept", organizationsController.postInviteAccept);
app.post("/invite", organizationsController.postInvite);
app.post("/invite/resend", organizationsController.postInviteResend);
app.delete("/invite", organizationsController.deleteInvite);
app.get("/members", organizationsController.getUsers);
app.delete("/member/:id", organizationsController.deleteMember);
app.put("/member/:id/role", organizationsController.putMemberRole);
app.get("/tags", organizationsController.getTags);
app.post("/oauth/google", organizationsController.postGoogleOauthRedirect);
app.post("/subscription/start", stripeController.postStartTrial);
app.post("/subscription/manage", stripeController.postCreateBillingSession);
app.get("/queries/:ids", organizationsController.getQueries);
// Learnings
app.get("/learnings", learningsController.getLearnings);
app.post("/learnings", learningsController.postLearnings);
app.get("/learning/:id", learningsController.getLearning);
app.post("/learning/:id", learningsController.postLearning);
app.delete("/learning/:id", learningsController.deleteLearning);
app.post("/learning/:id/vote", learningsController.postVote);
// Ideas
app.get("/ideas", ideasController.getIdeas);
app.post("/ideas", ideasController.postIdeas);
app.get("/idea/:id", ideasController.getIdea);
app.post("/idea/:id", ideasController.postIdea);
app.delete("/idea/:id", ideasController.deleteIdea);
app.post("/idea/:id/vote", ideasController.postVote);
app.post("/ideas/impact", ideasController.getEstimatedImpact);
app.post("/ideas/estimate/manual", ideasController.postEstimatedImpactManual);
// Metrics
app.get("/metrics", experimentsController.getMetrics);
app.post("/metrics", experimentsController.postMetrics);
app.get("/metric/:id", experimentsController.getMetric);
app.put("/metric/:id", experimentsController.putMetric);
app.delete("/metric/:id", experimentsController.deleteMetric);
app.post("/metric/:id/analysis", experimentsController.postMetricAnalysis);
app.get(
"/metric/:id/analysis/status",
experimentsController.getMetricAnalysisStatus
);
app.post(
"/metric/:id/analysis/cancel",
experimentsController.cancelMetricAnalysis
);
// Experiments
app.get("/experiments", experimentsController.getExperiments);
app.post("/experiments", experimentsController.postExperiments);
app.get(
"/experiments/frequency/month/:num",
experimentsController.getExperimentsFrequencyMonth
);
app.get("/experiment/:id", experimentsController.getExperiment);
app.get("/experiment/:id/snapshot/:phase", experimentsController.getSnapshot);
app.get(
"/experiment/:id/snapshot/:phase/:dimension",
experimentsController.getSnapshotWithDimension
);
app.post("/experiment/:id/snapshot", experimentsController.postSnapshot);
app.post(
"/experiment/:id/snapshot/:phase/preview",
experimentsController.previewManualSnapshot
);
app.post("/experiment/:id", experimentsController.postExperiment);
app.delete("/experiment/:id", experimentsController.deleteExperiment);
app.post("/experiment/:id/watch", experimentsController.watchExperiment);
app.post("/experiment/:id/unwatch", experimentsController.unwatchExperiment);
app.post("/experiment/:id/phase", experimentsController.postExperimentPhase);
app.post("/experiment/:id/stop", experimentsController.postExperimentStop);
app.post(
"/experiment/:id/upload/:filetype",
experimentsController.postScreenshotUploadUrl
);
app.put(
"/experiment/:id/variation/:variation/screenshot",
experimentsController.addScreenshot
);
app.delete(
"/experiment/:id/variation/:variation/screenshot",
experimentsController.deleteScreenshot
);
app.post(
"/experiment/:id/archive",
experimentsController.postExperimentArchive
);
app.post(
"/experiment/:id/unarchive",
experimentsController.postExperimentUnarchive
);
app.post("/experiments/import", experimentsController.postPastExperiments);
app.get(
"/experiments/import/:id",
experimentsController.getPastExperimentsList
);
app.get(
"/experiments/import/:id/status",
experimentsController.getPastExperimentStatus
);
app.post(
"/experiments/import/:id/cancel",
experimentsController.cancelPastExperiments
);
// Segments and Segment Comparisons
app.get("/segments", segmentsController.getAllSegments);
app.post("/segments", segmentsController.postSegments);
app.put("/segments/:id", segmentsController.putSegment);
app.get("/segments/comparisons", segmentsController.getAllSegmentComparisons);
app.post("/segments/comparisons", segmentsController.postSegmentComparisons);
app.get("/segments/comparison/:id", segmentsController.getSegmentComparison);
app.put("/segments/comparison/:id", segmentsController.putSegmentComparison);
app.get(
"/segments/comparison/:id/status",
segmentsController.getSegmentComparisonStatus
);
app.post(
"/segments/comparison/:id/cancel",
segmentsController.cancelSegmentComparison
);
// Dimensions
app.get("/dimensions", dimensionsController.getAllDimensions);
app.post("/dimensions", dimensionsController.postDimensions);
app.put("/dimensions/:id", dimensionsController.putDimension);
// Reports
/*
app.get("/reports", reportsController.getReports);
app.post("/reports", reportsController.postReports);
app.get("/report/:id", reportsController.getReport);
app.put("/report/:id", reportsController.putReport);
*/
// Data Sources
app.get("/datasources", organizationsController.getDataSources);
app.get("/datasource/:id", organizationsController.getDataSource);
app.post("/datasources", organizationsController.postDataSources);
app.put("/datasource/:id", organizationsController.putDataSource);
app.delete("/datasource/:id", organizationsController.deleteDataSource);
// API keys
app.get("/keys", organizationsController.getApiKeys);
app.post("/keys", organizationsController.postApiKey);
app.delete("/key/:key", organizationsController.deleteApiKey);
// Presentations
app.get("/presentations", presentationController.getPresentations);
app.post("/presentation", presentationController.postPresentation);
app.get("/presentation/:id", presentationController.getPresentation);
app.post("/presentation/:id", presentationController.updatePresentation);
app.delete("/presentation/:id", presentationController.deletePresentation);
// Discussions
app.get(
"/discussion/:parentType/:parentId",
discussionsController.getDiscussion
);
app.post(
"/discussion/:parentType/:parentId",
discussionsController.postDiscussions
);
app.put(
"/discussion/:parentType/:parentId/:index",
discussionsController.putComment
);
app.delete(
"/discussion/:parentType/:parentId/:index",
discussionsController.deleteComment
);
app.get("/discussions/recent/:num", discussionsController.getRecentDiscussions);
app.post("/upload/:filetype", discussionsController.postImageUploadUrl);
// Admin
app.get("/admin/organizations", adminController.getOrganizations);
app.post("/admin/organization/:id/populate", adminController.addSampleData);
// Fallback 404 route if nothing else matches
app.use(function (req, res) {
res.status(404).json({
status: 404,
message: "Route not found",
});
});
// Catch-all Express error handler: logs the error and returns a JSON
// payload. Registered last so it sees errors from every route above
// (including async errors surfaced via asyncHandler).
// eslint-disable-next-line
const errorHandler: ErrorRequestHandler = (err, req, res, next) => {
  // Errors may carry an explicit HTTP status; default to 400 otherwise
  const status = err.status || 400;
  // In production, prefer the structured pino request logger when present;
  // fall back to stderr for local development
  if (process.env.NODE_ENV === "production" && req.log) {
    req.log.error(err);
  } else {
    console.error(err);
  }
  res.status(status).json({
    status: status,
    message: err.message || "An error occurred",
  });
};
app.use(errorHandler);
export default app;

View File

@@ -0,0 +1,419 @@
import { AuthRequest } from "../types/AuthRequest";
import { OrganizationModel } from "../models/OrganizationModel";
import { Response } from "express";
import { createDataSource } from "../services/datasource";
import { PostgresConnectionParams } from "../../types/integrations/postgres";
import {
createExperiment,
createMetric,
createSnapshot,
} from "../services/experiments";
import { SegmentModel } from "../models/SegmentModel";
import uniqid from "uniqid";
import { DimensionModel } from "../models/DimensionModel";
import { getSourceIntegrationObject } from "../services/datasource";
import { ExperimentInterface } from "../../types/experiment";
import { createIdea } from "../services/ideas";
import { createImpactEstimate } from "../models/ImpactEstimateModel";
import { createLearning } from "../services/learnings";
import { createPresentation } from "../services/presentations";
import { DataSourceModel } from "../models/DataSourceModel";
import { POSTGRES_TEST_CONN } from "../util/secrets";
// Admin-only endpoint: returns every organization, each flagged with
// whether it can still be populated with sample data (i.e. it has no
// data sources yet).
export async function getOrganizations(req: AuthRequest, res: Response) {
  // Reject non-admin callers up front
  if (!req.admin) {
    return res.status(403).json({
      status: 403,
      message: "Only admins can get all organizations",
    });
  }

  const allOrganizations = await OrganizationModel.find();
  const orgIdsWithDatasources = await DataSourceModel.distinct("organization");

  const organizations = allOrganizations.map((org) => ({
    ...org.toJSON(),
    canPopulate: !orgIdsWithDatasources.includes(org.id),
  }));

  return res.status(200).json({
    status: 200,
    organizations,
  });
}
export async function addSampleData(req: AuthRequest, res: Response) {
if (!req.admin) {
return res.status(403).json({
status: 403,
message: "Only admins can perform this action",
});
}
const { id }: { id: string } = req.params;
const org = await OrganizationModel.findOne({ id });
if (!org) {
throw new Error("Cannot find organization");
}
// Change organization settings (allow all kinds of experiments)
org.settings.implementationTypes = [
"code",
"configuration",
"visual",
"custom",
];
await org.save();
// Add datasource
const dsParams: PostgresConnectionParams = {
defaultSchema: "",
...POSTGRES_TEST_CONN,
};
const datasource = await createDataSource(
org.id,
"Example Warehouse",
"postgres",
dsParams,
{
default: {
userIdColumn: "user_id",
anonymousIdColumn: "user_id",
timestampColumn: "received_at",
},
experiments: {
table: "experiment_viewed",
experimentIdColumn: "experiment_id",
variationColumn: "variation_id",
variationFormat: "index",
},
identifies: {
table: "users",
},
users: {
table: "users",
},
pageviews: {
table: "pages",
urlColumn: "path",
},
}
);
const integration = getSourceIntegrationObject(datasource);
// Define metrics
const signup = await createMetric({
organization: org.id,
datasource: datasource.id,
name: "Signup",
type: "binomial",
table: "signup",
userIdType: "user",
conditions: [],
});
const purchase = await createMetric({
organization: org.id,
datasource: datasource.id,
name: "Purchase",
type: "binomial",
table: "purchase",
userIdType: "user",
conditions: [],
});
const revenuPerUser = await createMetric({
organization: org.id,
datasource: datasource.id,
name: "Revenue per User",
type: "revenue",
table: "purchase",
column: "amount",
userIdType: "user",
conditions: [],
});
const viewedSignup = await createMetric({
organization: org.id,
datasource: datasource.id,
name: "Viewed Signup",
type: "binomial",
table: "viewed_signup",
userIdType: "either",
conditions: [],
});
const pagesPerVisit = await createMetric({
organization: org.id,
datasource: datasource.id,
name: "Pages per Visit",
type: "count",
table: "pages",
userIdType: "either",
conditions: [],
earlyStart: true,
});
const aov = await createMetric({
organization: org.id,
datasource: datasource.id,
name: "Average Order Value",
type: "revenue",
table: "purchase",
column: "amount",
userIdType: "user",
conditions: [],
ignoreNulls: true,
});
const timeOnSite = await createMetric({
organization: org.id,
datasource: datasource.id,
name: "Time on Site",
type: "duration",
table: "sessions",
column: "duration_seconds",
userIdType: "either",
timestampColumn: "date_start",
conditions: [],
earlyStart: true,
});
// Example segment
await SegmentModel.create({
datasource: datasource.id,
name: "Male",
sql:
"SELECT user_id, '2020-01-01 00:00:00'::timestamp as date from users where gender='male'",
targeting: "gender=male",
id: uniqid("seg_"),
dateCreated: new Date(),
dateUpdated: new Date(),
organization: org.id,
});
// Example dimension
await DimensionModel.create({
datasource: datasource.id,
name: "Gender",
sql: "SELECT user_id, gender as value FROM users",
id: uniqid("dim_"),
dateCreated: new Date(),
dateUpdated: new Date(),
organization: org.id,
});
// Import experiments
const yearago = new Date();
yearago.setDate(yearago.getDate() - 365);
const pastExperimentsResult = await integration.runPastExperimentQuery(
integration.getPastExperimentQuery(yearago)
);
const sharedFields: Partial<ExperimentInterface> = {
description: "",
implementation: "code",
hypothesis: "",
tags: [],
datasource: datasource.id,
userIdType: "user",
targetURLRegex: ".*",
status: "stopped",
dateCreated: new Date(),
dateUpdated: new Date(),
organization: org.id,
results: "inconclusive",
conversionWindowDays: 3,
};
const experiments: { [key: string]: Partial<ExperimentInterface> } = {};
pastExperimentsResult.experiments.forEach((imp) => {
if (!experiments[imp.experiment_id]) {
experiments[imp.experiment_id] = {
...sharedFields,
trackingKey: imp.experiment_id,
phases: [
{
coverage: 1,
phase: "main",
// TODO: support uneven variation weights
variationWeights: [0.5, 0.5],
reason: "",
dateStarted: imp.start_date,
dateEnded: imp.end_date,
},
],
};
}
const data = experiments[imp.experiment_id];
if (data.phases[0].dateStarted > imp.start_date) {
data.phases[0].dateStarted = imp.start_date;
}
if (data.phases[0].dateEnded < imp.end_date) {
data.phases[0].dateEnded = imp.end_date;
}
if (imp.experiment_id === "green_buttons") {
data.name = "Google Login";
data.description =
"There's been a lot of research to show that users don't like signing up for lots of different accounts.\n\nWe should try adding the option to login with social providers. Google is most popular with our users, so we want to start with that as a test. It's possible the design we're using for this won't scale to other social providers, but this is more about testing the concept, not the specific design.";
data.hypothesis =
"Allowing people to login with Google will increase our signup rate";
data.activationMetric = viewedSignup.id;
data.userIdType = "anonymous";
data.variations = [
{
name: "Control",
screenshots: [
{
path:
"https://cdn.growthbook.io/org_a919vk7kc59purn/exp_21e16hskhpd19kf/img_1p41rrkhupwkl9.png",
},
],
},
{
name: "Google Login",
screenshots: [
{
path:
"https://cdn.growthbook.io/org_a919vk7kc59purn/exp_21e16hskhpd19kf/img_1p41rrkhupwosz.png",
},
],
},
];
data.results = "won";
data.winner = 1;
data.analysis =
"Metrics up a little bit, but not completely significant. Calling it a winner since it fits our overall product direction.";
data.metrics = [signup.id, timeOnSite.id, pagesPerVisit.id];
} else if (imp.experiment_id === "purchase_cta") {
data.name = "Purchase CTA";
data.description =
"Stripe Checkout puts the dollar amount on their buy button. We know they do a ton of testing, so we should try that as well. Here is their page:\n\n![Stripe Checkout](https://i.stack.imgur.com/lMhQr.png)";
data.hypothesis =
"Adding a dollar amount to the buy button will remove uncertainty from users and cause them to convert at a higher rate.";
data.variations = [
{
name: "Control",
screenshots: [
{
path:
"https://cdn.growthbook.io/org_a919vk7kc59purn/exp_21e16hskhpckzk1/img_1p41rrkhupx408.png",
},
],
},
{
name: "Price in CTA",
screenshots: [
{
path:
"https://cdn.growthbook.io/org_a919vk7kc59purn/exp_21e16hskhpckzk1/img_1p41rrkhupx92w.png",
},
],
},
];
data.metrics = [purchase.id, revenuPerUser.id, aov.id];
data.results = "inconclusive";
} else if (imp.experiment_id === "simple_registration") {
data.name = "Simple Registration";
data.description =
"Our signup form is way longer than our competitors. First and Last name are important to keep our email open and click rates right, but at what cost?\n\n This experiment will tell us how many signups this longer form is costing us. If it's significant, maybe we can figure out a hybrid approach where First and Last name are not required up front, but we prompt for it later.";
data.hypothesis =
"Removing everything except email and password will reduce friction and increase signups.";
data.variations = [
{
name: "Control",
screenshots: [
{
path:
"https://cdn.growthbook.io/org_a919vk7kc59purn/exp_21e16hskhpcphqw/img_1p41rrkhupxr9f.png",
},
],
},
{
name: "Shorter Reg Modal",
screenshots: [
{
path:
"https://cdn.growthbook.io/org_a919vk7kc59purn/exp_21e16hskhpcphqw/img_1p41rrkhupxuup.png",
},
],
},
];
data.userIdType = "anonymous";
data.activationMetric = viewedSignup.id;
data.metrics = [signup.id];
data.results = "dnf";
data.analysis = "Found a bug with the experiment.";
}
});
const evidence: string[] = [];
await Promise.all(
Object.keys(experiments).map(async (key) => {
const data = experiments[key];
if (!data.name) return;
// Create experiment document
const exp = await createExperiment(data);
// Add a few experiments to evidence for an insight
if (["simple_registration", "green_buttons"].includes(data.trackingKey)) {
evidence.push(exp.id);
}
// Refresh results
await createSnapshot(exp, 0, datasource);
})
);
// Example idea
const estimate = await createImpactEstimate(
org.id,
signup.id,
"",
".*",
153.429,
1901.71,
153.429,
"-- Traffic to selected pages\nSELECT\n COUNT(DISTINCT user_id) as users\nFROM\n pages\nWHERE\n received_at >= '2020-11-13 16:26:12'\n AND received_at <= '2020-11-20 16:26:12'\n AND path ~ '.*';\n\n-- Entire site: Signup (binomial)\nWITH metric as (\n SELECT\n 1 as value\n FROM\n pages u\n JOIN signup m ON (\n m.user_id = u.user_id\n AND m.received_at >= u.received_at\n AND m.received_at <= u.received_at + INTERVAL '3 days'\n )\n WHERE\n u.received_at >= '2020-11-13 16:26:12'\n AND u.received_at <= '2020-11-20 16:26:12'\n GROUP BY\n u.user_id\n)\nSELECT\n SUM(value) as total\nFROM\n metric;\n\n-- Selected pages only: Signup (binomial)\nWITH metric as (\n SELECT\n 1 as value\n FROM\n pages u\n JOIN signup m ON (\n m.user_id = u.user_id\n AND m.received_at >= u.received_at\n AND m.received_at <= u.received_at + INTERVAL '3 days'\n )\n WHERE\n u.received_at >= '2020-11-13 16:26:12'\n AND u.received_at <= '2020-11-20 16:26:12'\n AND u.path ~ '.*'\n GROUP BY\n u.user_id\n)\nSELECT\n SUM(value) as value\nFROM\n metric;",
"sql"
);
await createIdea({
text: "Add Facebook Login",
details: "We saw a huge lift when we added Login with Google",
estimateParams: {
estimate: estimate.id,
improvement: 10,
numVariations: 2,
userAdjustment: 100,
},
experimentLength: 18,
impactScore: 38,
votes: [],
tags: [],
organization: org.id,
userId: "growthbook",
userName: "Example User",
});
// Example insight
await createLearning({
text: "Our users hate long forms",
details:
"Whenever we try shortening forms or providing shortcuts to users, they respond really well.",
evidence: evidence.map((id) => ({ experimentId: id })),
organization: org.id,
status: "accepted",
tags: [],
votes: [],
userId: "growthbook",
});
// Example presentation
await createPresentation({
title: "A/B Test Review",
experimentIds: evidence,
organization: org.id,
description: "",
options: {},
});
res.json({
status: 200,
});
}

View File

@@ -0,0 +1,226 @@
import { Request, Response } from "express";
import jwt from "jsonwebtoken";
import {
createRefreshToken,
deleteRefreshToken,
getUserIdFromAuthRefreshToken,
} from "../models/AuthRefreshModel";
import {
createForgotPasswordToken,
deleteForgotPasswordToken,
getUserIdFromForgotPasswordToken,
} from "../models/ForgotPasswordModel";
import { validatePasswordFormat } from "../services/auth";
import { getEmailFromUserId } from "../services/organizations";
import {
createUser,
getUserByEmail,
getUserById,
updatePassword,
verifyPassword,
} from "../services/users";
import { AuthRequest } from "../types/AuthRequest";
import { JWT_SECRET } from "../util/secrets";
// Create a short-lived signed JWT for the given user id. The token carries
// the standard profile/openid/email scopes and is issued and audienced for
// the GrowthBook API.
function generateJWT(userId: string) {
  // Token lifetime: 30 minutes
  const TOKEN_TTL_SECONDS = 1800;
  return jwt.sign({ scope: "profile openid email" }, JWT_SECRET, {
    algorithm: "HS256",
    audience: "https://api.growthbook.io",
    issuer: "https://api.growthbook.io",
    subject: userId,
    expiresIn: TOKEN_TTL_SECONDS,
  });
}
// Finish a successful login/registration: persist a refresh token (which
// sets a cookie on the response) and return a fresh JWT to the client.
async function successResponse(req: Request, res: Response, userId: string) {
  await createRefreshToken(req, res, userId);
  const token = generateJWT(userId);
  return res.status(200).json({ status: 200, token });
}
// Exchange the AUTH_REFRESH_TOKEN cookie for a fresh JWT. Always responds
// 200; the `authenticated` flag tells the client whether it worked.
export async function postRefresh(req: Request, res: Response) {
  const notAuthenticated = () =>
    res.json({
      status: 200,
      authenticated: false,
    });
  const refreshToken = req.cookies["AUTH_REFRESH_TOKEN"];
  if (!refreshToken) {
    return notAuthenticated();
  }
  const userId = await getUserIdFromAuthRefreshToken(refreshToken);
  if (!userId) {
    return notAuthenticated();
  }
  const user = await getUserById(userId);
  return res.json({
    status: 200,
    authenticated: true,
    token: generateJWT(userId),
    email: user?.email || "",
  });
}
// Email/password login. Returns a JWT + refresh cookie on success.
export async function postLogin(req: Request, res: Response) {
  const { email, password }: { email: string; password: string } = req.body;
  validatePasswordFormat(password);
  const user = await getUserByEmail(email);
  if (!user) {
    return res.status(400).json({
      status: 400,
      message: "Could not find account with that email address",
    });
  }
  // BUG FIX: verifyPassword is async (it is awaited in postChangePassword).
  // Without `await` the returned Promise is always truthy, so ANY password
  // would have been accepted here.
  const valid = await verifyPassword(user, password);
  if (!valid) {
    return res.status(400).json({
      status: 400,
      message: "Invalid password",
    });
  }
  return successResponse(req, res, user.id);
}
// Log out: invalidate the refresh token (and clear its cookie), then acknowledge.
export async function postLogout(req: Request, res: Response) {
  await deleteRefreshToken(req, res);
  res.status(200).json({ status: 200 });
}
// Register a new account. If the email already exists, attempt to log into
// the existing account with the supplied password instead.
export async function postRegister(req: Request, res: Response) {
  const {
    email,
    name,
    password,
  }: { email: string; name: string; password: string } = req.body;
  validatePasswordFormat(password);
  // TODO: validate email and name
  const existingUser = await getUserByEmail(email);
  if (existingUser) {
    // Try to login to existing account
    // BUG FIX: verifyPassword is async (awaited in postChangePassword).
    // Without `await` the Promise is always truthy, so ANY password would
    // have logged into the existing account.
    const valid = await verifyPassword(existingUser, password);
    if (valid) {
      return successResponse(req, res, existingUser.id);
    }
    return res.status(400).json({
      status: 400,
      message: "That email address is already registered.",
    });
  }
  // Create new account
  const user = await createUser(name, email, password);
  return successResponse(req, res, user.id);
}
// Start the forgot-password flow for an email address.
// NOTE(review): presumably createForgotPasswordToken also sends the reset
// email -- confirm how unknown addresses are handled.
export async function postForgotPassword(req: Request, res: Response) {
  const { email }: { email: string } = req.body;
  await createForgotPasswordToken(email);
  res.status(200).json({ status: 200 });
}
// Validate a password-reset token and return the email it belongs to
// (used by the reset form to show which account is being reset).
export async function getResetPassword(req: Request, res: Response) {
  const { token } = req.params;
  if (!token) throw new Error("Invalid password reset token.");
  const userId = await getUserIdFromForgotPasswordToken(token);
  if (!userId) throw new Error("Invalid password reset token.");
  const email = await getEmailFromUserId(userId);
  if (!email) {
    throw new Error("Could not find user for that password reset token.");
  }
  res.status(200).json({ status: 200, email });
}
export async function postResetPassword(req: Request, res: Response) {
const { token } = req.params;
const { password }: { password: string } = req.body;
if (!token) {
throw new Error("Invalid password reset token.");
}
const userId = await getUserIdFromForgotPasswordToken(token);
if (!userId) {
throw new Error("Invalid password reset token.");
}
const email = await getEmailFromUserId(userId);
if (!email) {
throw new Error("Could not find user for that password reset token.");
}
await updatePassword(userId, password);
await deleteForgotPasswordToken(token);
res.status(200).json({
status: 200,
email,
});
}
export async function postChangePassword(req: AuthRequest, res: Response) {
const {
currentPassword,
newPassword,
}: {
currentPassword: string;
newPassword: string;
} = req.body;
const user = await getUserById(req.userId);
const valid = await verifyPassword(user, currentPassword);
if (!valid) {
throw new Error("Current password is incorrect");
}
await updatePassword(user.id, newPassword);
res.status(200).json({
status: 200,
});
}

View File

@@ -0,0 +1,230 @@
import { Request, Response } from "express";
import { getExperimentsByOrganization } from "../services/experiments";
import { lookupOrganizationByApiKey } from "../services/apiKey";
import { SegmentModel } from "../models/SegmentModel";
import fs from "fs";
import path from "path";
import { APP_ORIGIN } from "../util/secrets";
import { ExperimentInterface } from "../../types/experiment";
type VariationInfo = {
key?: string;
weight?: number;
data?: {
[key: string]: unknown;
};
dom?: {
selector: string;
action: "set" | "append" | "remove";
attribute: string;
value: string;
}[];
css?: string;
};
type Experiment = {
key: string;
status: "draft" | "running" | "stopped";
anon: boolean;
auto: boolean;
variations: number | VariationInfo[];
force?: number;
coverage?: number;
targeting?: string[];
url?: string;
// @deprecated
weights?: number[];
// @deprecated
data?: { [key: string]: unknown[] };
};
type ConfigResponse = {
status: 200;
experiments: Experiment[];
};
type ErrorResponse = {
status: 400;
error: string;
};
// An experiment can be auto-assigned by the client SDK only if it targets a
// URL and at least one variation carries visual changes (DOM edits or CSS).
export function canAutoAssignExperiment(
  experiment: ExperimentInterface
): boolean {
  if (!experiment.targetURLRegex) return false;
  // IDIOM: `.some()` short-circuits instead of building a filtered array
  // (the original used `.filter(...).length > 0`).
  return experiment.variations.some(
    (v) => v.dom?.length > 0 || v.css?.length > 0
  );
}
// Public SDK endpoint: return the experiment configuration for the
// organization identified by the API key in the URL path.
export async function getExperimentConfig(
  req: Request<{ key: string }>,
  res: Response<ConfigResponse | ErrorResponse>
) {
  const { key } = req.params;
  try {
    const organization = await lookupOrganizationByApiKey(key);
    if (!organization) {
      return res.status(400).json({
        status: 400,
        error: "Invalid API key",
      });
    }
    const experiments = await getExperimentsByOrganization(organization);
    // If experiments are targeted to specific segments
    const segmentIds = new Set<string>();
    experiments.forEach((e) => {
      if (e.segment) {
        segmentIds.add(e.segment);
      }
    });
    // segment id -> trimmed targeting rules (segments store one rule per line)
    const segmentMap = new Map<string, string[]>();
    if (segmentIds.size > 0) {
      const segments = await SegmentModel.find({
        id: { $in: Array.from(segmentIds.values()) },
        organization,
      });
      segments.forEach((s) => {
        if (s.targeting) {
          segmentMap.set(
            s.id,
            s.targeting.split("\n").map((s) => s.trim())
          );
        }
      });
    }
    const list: Experiment[] = [];
    experiments.forEach((exp) => {
      // Archived experiments are never served to SDKs
      if (exp.archived) {
        return;
      }
      const key = exp.trackingKey || exp.id;
      // Deprecated per-variation data payload, stored as a JSON string;
      // left undefined when missing, too short, or unparsable
      let data: { [key: string]: unknown[] };
      try {
        data = exp.data?.length > 2 ? JSON.parse(exp.data) : undefined;
      } catch (e) {
        // Bad data
      }
      // Merge experiment-level, segment-level, and (while running)
      // latest-phase targeting rules into a single list
      let targeting = exp.targeting
        ? exp.targeting.split("\n").map((s) => s.trim())
        : [];
      if (exp.segment) {
        targeting = targeting.concat(segmentMap.get(exp.segment) || []);
      }
      const phase = exp.phases[exp.phases.length - 1];
      if (phase && phase.targeting && exp.status === "running") {
        targeting = targeting.concat(
          phase.targeting.split("\n").map((s) => s.trim())
        );
      }
      targeting = targeting.filter((t) => t.length > 0);
      const canAutoAssign = canAutoAssignExperiment(exp);
      const experimentInfo: Experiment = {
        key,
        status: exp.status,
        anon: exp.userIdType !== "user",
        targeting: targeting.length ? targeting : undefined,
        coverage: phase?.coverage,
        auto: (canAutoAssign && exp.autoAssign) || false,
        url: exp.targetURLRegex || undefined,
        variations: exp.variations.map((v, i) => {
          // Build per-variation info: deprecated data payload, key, visual
          // changes (css/dom), and the weight from the latest phase
          const info: VariationInfo = {};
          if (data) {
            Object.keys(data).forEach((k) => {
              if (data[k]?.[i]) {
                info.data = info.data || {};
                info.data[k] = data[k][i];
              }
            });
          }
          if (v.key) {
            info.key = v.key;
          }
          if (v.css) {
            info.css = v.css;
          }
          if (v.dom && v.dom.length > 0) {
            info.dom = v.dom;
          }
          if (phase && phase.variationWeights) {
            info.weight = phase.variationWeights[i] || 0;
          }
          return info;
        }),
        // TODO: remove once all the SDKs are updated to use the new variationInfos array
        weights: phase?.variationWeights,
        data,
      };
      // If every variation serialized to an empty object, collapse the
      // array down to a plain variation count
      if (
        JSON.stringify(experimentInfo.variations) ===
        `[${Array(exp.variations.length).fill("{}").join(",")}]`
      ) {
        experimentInfo.variations = exp.variations.length;
      }
      // Stopped experiments that won force everyone into the winning variation
      if (exp.status === "stopped" && exp.results === "won") {
        experimentInfo.force = exp.winner;
      }
      // A running experiment with no phase is unusable; skip it entirely
      if (exp.status === "running") {
        if (!phase) return;
      }
      list.push(experimentInfo);
    });
    // TODO: add cache headers?
    res.status(200).json({
      status: 200,
      experiments: list,
    });
  } catch (e) {
    console.error(e);
    res.status(400).json({
      status: 400,
      error: "Failed to get experiment config",
    });
  }
}
// Process-wide cache of the wrapped visual designer script; built on first request.
let visualDesignerContents: string;
// Serve the A/B visual designer bootstrap script. Reads the ab-designer
// bundle from node_modules, wraps it so it only runs when the page URL
// contains the growthbookVisualDesigner query flag, and caches the result.
export async function getVisualDesignerScript(req: Request, res: Response) {
  if (!visualDesignerContents) {
    const visualDesignerPath = path.join(
      __dirname,
      "..",
      "..",
      "node_modules",
      "ab-designer",
      "dist",
      "ab-designer.cjs.production.min.js"
    );
    // A missing bundle degrades to an empty script rather than a crash
    visualDesignerContents = fs.existsSync(visualDesignerPath)
      ? fs.readFileSync(visualDesignerPath).toString()
      : "";
    // Strip the source-map pointer and the bundle's "use strict" so the
    // code can be wrapped in a function body below
    visualDesignerContents = visualDesignerContents
      .replace(/\/\/# sourceMappingURL.*/, "")
      .replace(/"use strict";/, "");
    visualDesignerContents = `function startVisualDesigner(){${visualDesignerContents}}
if(window.location.search.match(/\\bgrowthbookVisualDesigner\\b/)) {
window.growthbook=window.growthbook||[];window.growthbook.push("disable");
window.EXP_PLATFORM_ORIGIN="${APP_ORIGIN}";
startVisualDesigner();
}`;
  }
  res.setHeader("Content-Type", "text/javascript");
  res.send(visualDesignerContents);
}

View File

@@ -0,0 +1,77 @@
import { AuthRequest } from "../types/AuthRequest";
import { Response } from "express";
import uniqid from "uniqid";
import { getDataSourceById } from "../services/datasource";
import { DimensionModel } from "../models/DimensionModel";
import { DimensionInterface } from "../../types/dimension";
// List every dimension defined for the current organization.
export async function getAllDimensions(req: AuthRequest, res: Response) {
  const orgId = req.organization.id;
  const dimensions = await DimensionModel.find({ organization: orgId });
  res.status(200).json({ status: 200, dimensions });
}
// Create a new SQL-backed dimension for the current organization.
export async function postDimensions(
  req: AuthRequest<Partial<DimensionInterface>>,
  res: Response
) {
  const { datasource, name, sql } = req.body;
  // The referenced data source must exist and belong to this organization
  const datasourceDoc = await getDataSourceById(datasource);
  const ownsDatasource =
    datasourceDoc && datasourceDoc.organization === req.organization.id;
  if (!ownsDatasource) {
    throw new Error("Invalid data source");
  }
  const doc = await DimensionModel.create({
    datasource,
    name,
    sql,
    id: uniqid("dim_"),
    dateCreated: new Date(),
    dateUpdated: new Date(),
    organization: req.organization.id,
  });
  res.status(200).json({ status: 200, dimension: doc });
}
// Update an existing dimension's data source, name, and SQL.
export async function putDimension(
  req: AuthRequest<Partial<DimensionInterface>>,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const dimension = await DimensionModel.findOne({ id });
  if (!dimension) {
    throw new Error("Could not find dimension");
  }
  if (dimension.organization !== req.organization.id) {
    throw new Error("You don't have access to that dimension");
  }
  const { datasource, name, sql } = req.body;
  // The new data source must also belong to this organization
  const datasourceDoc = await getDataSourceById(datasource);
  if (!datasourceDoc || datasourceDoc.organization !== req.organization.id) {
    throw new Error("Invalid data source");
  }
  dimension
    .set("datasource", datasource)
    .set("name", name)
    .set("sql", sql)
    .set("dateUpdated", new Date());
  await dimension.save();
  res.status(200).json({ status: 200, dimension });
}

View File

@@ -0,0 +1,232 @@
import { Response } from "express";
import { AuthRequest } from "../types/AuthRequest";
import { DiscussionParentType } from "../../types/discussion";
import {
addComment,
getDiscussionByParent,
getLastNDiscussions,
} from "../services/discussions";
import { getFileUploadURL } from "../services/files";
// Add a comment to (or start) the discussion thread attached to a parent
// object such as an experiment or idea.
export async function postDiscussions(
  req: AuthRequest<{ comment: string }>,
  res: Response
) {
  const {
    parentId,
    parentType,
  }: { parentId: string; parentType: DiscussionParentType } = req.params;
  const { comment } = req.body;
  try {
    // TODO: validate that parentType and parentId are valid for this organization
    const commenter = { id: req.userId, email: req.email, name: req.name };
    await addComment(
      req.organization.id,
      parentType,
      parentId,
      commenter,
      comment
    );
    res.status(200).json({ status: 200 });
  } catch (e) {
    res.status(400).json({ status: 400, message: e.message });
  }
}
// Delete a single comment from a discussion thread; only its author may do so.
export async function deleteComment(req: AuthRequest, res: Response) {
  const {
    parentId,
    parentType,
    index,
  }: {
    parentId: string;
    parentType: DiscussionParentType;
    index: string;
  } = req.params;
  const i = parseInt(index);
  const discussion = await getDiscussionByParent(
    req.organization.id,
    parentType,
    parentId
  );
  if (!discussion) {
    return res.status(404).json({
      status: 404,
      message: "Discussion not found",
    });
  }
  // FIX: use the already-parsed index (the original parsed it twice) and
  // treat a missing comment as not-found -- the original silently performed
  // a no-op splice and saved anyway when the index was out of range.
  const current = discussion.comments[i];
  if (!current) {
    return res.status(404).json({
      status: 404,
      message: "Comment not found",
    });
  }
  if (current.userId !== req.userId) {
    return res.status(403).json({
      status: 403,
      message: "Only the original author can delete a comment",
    });
  }
  discussion.comments.splice(i, 1);
  discussion.markModified("comments");
  try {
    await discussion.save();
    return res.status(200).json({
      status: 200,
    });
  } catch (e) {
    return res.status(400).json({
      status: 400,
      message: e.message || "Error deleting comment",
    });
  }
}
// Edit an existing comment in place; only its author may do so.
export async function putComment(
  req: AuthRequest<{ comment: string }>,
  res: Response
) {
  const {
    parentId,
    parentType,
    index,
  }: {
    parentId: string;
    parentType: DiscussionParentType;
    index: string;
  } = req.params;
  const { comment } = req.body;
  const i = parseInt(index);
  const discussion = await getDiscussionByParent(
    req.organization.id,
    parentType,
    parentId
  );
  const current = discussion?.comments[i];
  if (!current) {
    return res.status(404).json({
      status: 404,
      message: "Discussion not found",
    });
  }
  if (current.userId !== req.userId) {
    return res.status(403).json({
      status: 403,
      message: "Only the original author can edit a comment",
    });
  }
  // Replace the content and flag the comment as edited
  current.content = comment;
  current.edited = true;
  discussion.dateUpdated = new Date();
  discussion.markModified("comments");
  try {
    await discussion.save();
    return res.status(200).json({ status: 200 });
  } catch (e) {
    return res.status(400).json({
      status: 400,
      message: e.message || "Error saving comment",
    });
  }
}
// Fetch the discussion thread attached to a parent object.
export async function getDiscussion(req: AuthRequest, res: Response) {
  const {
    parentId,
    parentType,
  }: { parentId: string; parentType: DiscussionParentType } = req.params;
  try {
    const discussion = await getDiscussionByParent(
      req.organization.id,
      parentType,
      parentId
    );
    res.status(200).json({ status: 200, discussion });
  } catch (e) {
    res.status(400).json({ status: 400, message: e.message });
  }
}
// Return the most recent comments across all of the organization's
// discussions, newest first, limited to `num` (capped at 100).
export async function getRecentDiscussions(req: AuthRequest, res: Response) {
  const { num }: { num: string } = req.params;
  let intNum = parseInt(num);
  // ROBUSTNESS FIX: the original let NaN (or a negative value) flow into the
  // DB query and the final slice; fall back to a sane default instead.
  if (!Number.isFinite(intNum) || intNum < 1) intNum = 10;
  if (intNum > 100) intNum = 100;
  try {
    // since deletes can update the dateUpdated, we want to give ourselves a bit of buffer.
    const discussions = await getLastNDiscussions(
      req.organization.id,
      intNum + 5
    );
    // Flatten every comment, carrying along its parent object reference
    const recent: {
      content: string;
      date: Date;
      userId: string;
      userName: string;
      userEmail: string;
      parentType: string;
      parentId: string;
    }[] = [];
    discussions.forEach((d) => {
      d.comments.forEach((c) => {
        recent.push({
          content: c.content,
          date: c.date,
          userId: c.userId,
          userName: c.userName,
          userEmail: c.userEmail,
          parentType: d.parentType,
          parentId: d.parentId,
        });
      });
    });
    // Newest first
    recent.sort((a, b) => b.date.getTime() - a.date.getTime());
    res.status(200).json({
      status: 200,
      discussions: recent.slice(0, intNum),
    });
  } catch (e) {
    res.status(400).json({
      status: 400,
      message: e.message,
    });
  }
}
// Generate a signed image-upload URL, namespaced by organization and
// year-month so uploads land in monthly folders.
export async function postImageUploadUrl(req: AuthRequest, res: Response) {
  const { filetype }: { filetype: string } = req.params;
  const yearMonth = new Date().toISOString().substr(0, 7);
  const prefix = `${req.organization.id}/uploads/${yearMonth}/`;
  const { uploadURL, fileURL } = await getFileUploadURL(filetype, prefix);
  res.status(200).json({ status: 200, uploadURL, fileURL });
}

View File

@@ -0,0 +1,61 @@
/*
import {Response} from "express";
import {AuthRequest} from "../types/AuthRequest";
import {getDataSourcesByOrganization, getDataSourceById} from "../services/datasource";
import {getTrackTableByDataSources, syncTrackTable} from "../services/events";
export async function getEvents(req: AuthRequest, res: Response) {
const datasources = await getDataSourcesByOrganization(req.organization.id);
if (!datasources || !datasources.length) {
return res.status(200).json({
status: 200,
trackTables: [],
});
}
const trackTables = await getTrackTableByDataSources(datasources.map(d => d.id));
if (!trackTables || !trackTables.length) {
return res.status(200).json({
status: 200,
trackTables: [],
});
}
res.status(200).json({
status: 200,
trackTables: trackTables,
});
}
export async function postEventsSync(req: AuthRequest<{datasource: string}>, res: Response) {
const {datasource} = req.body;
const datasourceObj = await getDataSourceById(datasource);
if (!datasourceObj) {
return res.status(404).json({
status: 404,
message: "Cannot find datasource: " + datasource
});
}
if (datasourceObj.organization !== req.organization.id) {
return res.status(403).json({
status: 403,
message: "Cannot access datasource: " + datasource
});
}
try {
await syncTrackTable(datasourceObj);
res.status(200).json({
status: 200
});
}
catch (e) {
res.status(400).json({
status: 400,
message: e.message
});
}
}
*/

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,279 @@
import { Response } from "express";
import { AuthRequest } from "../types/AuthRequest";
import {
getIdeasByOrganization,
createIdea,
getIdeaById,
deleteIdeaById,
} from "../services/ideas";
import { IdeaInterface } from "../../types/idea";
import { addTagsDiff } from "../services/tag";
import { Vote } from "../../types/vote";
import { userHasAccess } from "../services/organizations";
import {
getImpactEstimate,
ImpactEstimateModel,
createImpactEstimate,
} from "../models/ImpactEstimateModel";
import { ImpactEstimateInterface } from "../../types/impact-estimate";
// List all ideas belonging to the current organization.
export async function getIdeas(req: AuthRequest, res: Response) {
  const ideas = await getIdeasByOrganization(req.organization.id);
  res.status(200).json({ status: 200, ideas });
}
// Look up (or compute) an impact estimate for a metric/URL-regex/segment combo.
// NOTE(review): despite the "get" name, parameters come from the request body.
export async function getEstimatedImpact(req: AuthRequest, res: Response) {
  const {
    regex,
    metric,
    segment,
  }: { regex: string; metric: string; segment?: string } = req.body;
  const estimate = await getImpactEstimate(
    req.organization.id,
    metric,
    regex,
    segment
  );
  res.status(200).json({ status: 200, estimate });
}
// Record a manually-entered impact estimate (no generated SQL behind it,
// hence the null query argument).
export async function postEstimatedImpactManual(
  req: AuthRequest<Partial<ImpactEstimateInterface>>,
  res: Response
) {
  const { value, metricTotal, users, metric, regex } = req.body;
  const orgId = req.organization.id;
  const estimate = await createImpactEstimate(
    orgId,
    metric,
    null,
    regex,
    value,
    users,
    metricTotal
  );
  res.status(200).json({ status: 200, estimate });
}
/**
 * Create a new idea owned by the current user and organization.
 * @param req
 * @param res
 */
export async function postIdeas(
  req: AuthRequest<Partial<IdeaInterface>>,
  res: Response
) {
  // Server-controlled fields override anything sent by the client
  const idea = await createIdea({
    ...req.body,
    organization: req.organization.id,
    source: "web",
    userId: req.userId,
  });
  res.status(200).json({ status: 200, idea });
}
// Fetch a single idea plus its linked impact estimate (if any).
export async function getIdea(
  req: AuthRequest<Partial<IdeaInterface>>,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const idea = await getIdeaById(id);
  if (!idea) {
    // FIX: was res.status(403) with a 404 body -- align the HTTP status
    res.status(404).json({
      status: 404,
      message: "Idea not found",
    });
    return;
  }
  if (!(await userHasAccess(req, idea.organization))) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this idea",
    });
    return;
  }
  let estimate = null;
  if (idea.estimateParams?.estimate) {
    estimate = await ImpactEstimateModel.findOne({
      id: idea.estimateParams.estimate,
    });
    // Never leak an estimate belonging to a different organization
    if (estimate && estimate.organization !== idea.organization) {
      console.error(
        "Estimate org does not match idea org",
        estimate.id,
        estimate.organization,
        idea.organization
      );
      estimate = null;
    }
  }
  res.status(200).json({
    status: 200,
    idea,
    estimate,
  });
}
/**
 * Update an Idea (partial update; only fields present in the body change)
 * @param req
 * @param res
 */
export async function postIdea(req: AuthRequest<IdeaInterface>, res: Response) {
  const { id }: { id: string } = req.params;
  const idea = await getIdeaById(id);
  const data = req.body;
  if (!idea) {
    // FIX: was res.status(403) with a 404 body -- align the HTTP status
    res.status(404).json({
      status: 404,
      message: "Idea not found",
    });
    return;
  }
  if (idea.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this idea",
    });
    return;
  }
  // Snapshot the pre-update state so tag changes can be diffed below
  const existing = idea.toJSON();
  data.text && idea.set("text", data.text);
  "details" in data && idea.set("details", data.details);
  "tags" in data && idea.set("tags", data.tags);
  "archived" in data && idea.set("archived", data.archived);
  data.votes && idea.set("votes", data.votes);
  "impactScore" in data && idea.set("impactScore", data.impactScore);
  data.experimentLength && idea.set("experimentLength", data.experimentLength);
  data.estimateParams && idea.set("estimateParams", data.estimateParams);
  await idea.save();
  if (data.tags && data.tags.length > 0) {
    await addTagsDiff(req.organization.id, existing.tags || [], data.tags);
  }
  res.status(200).json({
    status: 200,
    idea,
  });
}
// Permanently delete an idea owned by the current organization.
export async function deleteIdea(
  req: AuthRequest<IdeaInterface>,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const idea = await getIdeaById(id);
  if (!idea) {
    // FIX: was res.status(403) with a 404 body -- align the HTTP status
    res.status(404).json({
      status: 404,
      message: "Idea not found",
    });
    return;
  }
  if (idea.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this idea",
    });
    return;
  }
  // note: we might want to change this to change the status to
  // 'deleted' instead of actually deleting the document.
  const del = await deleteIdeaById(idea.id);
  res.status(200).json({
    status: 200,
    result: del,
  });
}
// Cast or update the current user's up/down vote on an idea. A user can
// only have one vote; re-voting updates their existing entry.
export async function postVote(req: AuthRequest<Partial<Vote>>, res: Response) {
  const { id }: { id: string } = req.params;
  const data = req.body;
  const idea = await getIdeaById(id);
  if (!idea) {
    // FIX: was res.status(403) with a 404 body -- align the HTTP status
    res.status(404).json({
      status: 404,
      message: "Idea not found",
    });
    return;
  }
  if (idea.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this idea",
    });
    return;
  }
  try {
    // Anything non-positive counts as a downvote
    const newVote = data.dir > 0 ? 1 : -1;
    let found = false;
    if (idea.votes) {
      // you can only vote once, see if they've already voted
      // (forEach, not map -- the original used map purely for side effects)
      idea.votes.forEach((v) => {
        if (v.userId === req.userId) {
          // they have changed their vote, or are voting again
          v.dir = newVote;
          v.dateUpdated = new Date();
          found = true;
        }
      });
    }
    if (!found) {
      // add the vote
      const v: Vote = {
        userId: req.userId,
        dir: newVote,
        dateCreated: new Date(),
        dateUpdated: new Date(),
      };
      // ROBUSTNESS FIX: the original pushed without this guard even though
      // the loop above treated idea.votes as possibly missing
      idea.votes = idea.votes || [];
      idea.votes.push(v);
    }
    await idea.save();
    res.status(200).json({
      status: 200,
      idea: idea,
    });
  } catch (e) {
    console.error(e);
    res.status(400).json({
      status: 400,
      message: e.message,
    });
  }
}

View File

@@ -0,0 +1,232 @@
import { Response } from "express";
import { AuthRequest } from "../types/AuthRequest";
import {
getLearningsByOrganization,
createLearning,
getLearningById,
deleteLearningById,
} from "../services/learnings";
import { LearningInterface } from "../../types/insight";
import { getExperimentById } from "../services/experiments";
import { Vote } from "../../types/vote";
import { addTagsDiff } from "../services/tag";
import { userHasAccess } from "../services/organizations";
// List all learnings (insights) belonging to the current organization.
export async function getLearnings(req: AuthRequest, res: Response) {
  const learnings = await getLearningsByOrganization(req.organization.id);
  res.status(200).json({ status: 200, learnings });
}
/**
 * Create a new learning owned by the current user and organization.
 * @param req
 * @param res
 */
export async function postLearnings(
  req: AuthRequest<Partial<LearningInterface>>,
  res: Response
) {
  // Server-controlled fields override anything sent by the client
  const learning = await createLearning({
    ...req.body,
    organization: req.organization.id,
    userId: req.userId,
  });
  res.status(200).json({ status: 200, learning });
}
export async function getLearning(
req: AuthRequest<Partial<LearningInterface>>,
res: Response
) {
const { id }: { id: string } = req.params;
const learning = await getLearningById(id);
if (!learning) {
res.status(403).json({
status: 404,
message: "Learning not found",
});
return;
}
if (!(await userHasAccess(req, learning.organization))) {
res.status(403).json({
status: 403,
message: "You do not have access to this learning",
});
return;
}
// get the experiments for this learning:
// (could make this an IN query and do it once...)
const getExperiments = async () => {
const results = [];
for (let i = 0; i < learning.evidence.length; i++) {
results.push(getExperimentById(learning.evidence[i].experimentId));
}
return await Promise.all(results);
};
const experiments = await getExperiments();
res.status(200).json({
status: 200,
learning,
experiments,
});
}
/**
 * Update a learning. Only fields explicitly present in the request body are
 * overwritten; tag changes are also registered with the organization.
 * @param req
 * @param res
 */
export async function postLearning(
  req: AuthRequest<LearningInterface>,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const learning = await getLearningById(id);
  const data = req.body;
  if (!learning) {
    // Bug fix: HTTP status was previously sent as 403 while the body said 404.
    res.status(404).json({
      status: 404,
      message: "Learning not found",
    });
    return;
  }
  if (learning.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this learning",
    });
    return;
  }
  // Snapshot the pre-update state so we can diff tags after saving.
  const existing = learning.toJSON();
  // Only overwrite fields the client explicitly sent.
  "text" in data && learning.set("text", data.text);
  "details" in data && learning.set("details", data.details);
  "tags" in data && learning.set("tags", data.tags);
  "evidence" in data && learning.set("evidence", data.evidence);
  "votes" in data && learning.set("votes", data.votes);
  await learning.save();
  if ("tags" in data) {
    // Register any newly introduced tags with the organization.
    await addTagsDiff(
      req.organization.id,
      existing.tags || [],
      data.tags || []
    );
  }
  res.status(200).json({
    status: 200,
    learning,
  });
}
/**
 * Delete a learning by id. Responds 404 when it does not exist and 403 when
 * the requester's organization does not own it.
 */
export async function deleteLearning(
  req: AuthRequest<LearningInterface>,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const learning = await getLearningById(id);
  if (!learning) {
    // Bug fix: HTTP status was previously 403 with a 404 body, and the
    // messages said "experiment" (copy/paste from the experiments controller).
    res.status(404).json({
      status: 404,
      message: "Learning not found",
    });
    return;
  }
  if (learning.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this learning",
    });
    return;
  }
  // note: we might want to change this to change the status to
  // 'deleted' instead of actually deleting the document.
  const del = await deleteLearningById(learning.id);
  res.status(200).json({
    status: 200,
    result: del,
  });
}
/**
 * Cast or update the current user's vote on a learning. A user may only have
 * one vote; voting again replaces the previous direction.
 */
export async function postVote(req: AuthRequest<Partial<Vote>>, res: Response) {
  const { id }: { id: string } = req.params;
  const data = req.body;
  const learning = await getLearningById(id);
  if (!learning) {
    // Bug fix: HTTP status was previously sent as 403 while the body said 404.
    res.status(404).json({
      status: 404,
      message: "Learning not found",
    });
    return;
  }
  if (learning.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this learning",
    });
    return;
  }
  try {
    // Normalize any incoming value to an up (+1) or down (-1) vote.
    const newVote = data.dir > 0 ? 1 : -1;
    let found = false;
    if (learning.votes) {
      // you can only vote once, see if they've already voted
      learning.votes.forEach((v) => {
        if (v.userId === req.userId) {
          // they have changed their vote, or are voting again
          v.dir = newVote;
          v.dateUpdated = new Date();
          found = true;
        }
      });
    }
    if (!found) {
      const v: Vote = {
        userId: req.userId,
        dir: newVote,
        dateCreated: new Date(),
        dateUpdated: new Date(),
      };
      // Bug fix: previously pushed onto learning.votes even when the array
      // was undefined, which would throw; initialize it first.
      if (!learning.votes) {
        learning.set("votes", []);
      }
      learning.votes.push(v);
    }
    await learning.save();
    res.status(200).json({
      status: 200,
      learning: learning,
    });
  } catch (e) {
    res.status(400).json({
      status: 400,
      message: e.message,
    });
    console.error(e);
  }
}

View File

@@ -0,0 +1,889 @@
import { Response } from "express";
import { AuthRequest } from "../types/AuthRequest";
import {
createOrganization,
acceptInvite,
inviteUser,
removeMember,
revokeInvite,
getInviteUrl,
getAllOrganizationsByUserId,
getRole,
} from "../services/organizations";
import {
DataSourceParams,
DataSourceType,
DataSourceSettings,
} from "../../types/datasource";
import {
createDataSource,
getDataSourcesByOrganization,
getDataSourceById,
testDataSourceConnection,
mergeAndEncryptParams,
getSourceIntegrationObject,
} from "../services/datasource";
import { createUser, getUsersByIds } from "../services/users";
import mongoose from "mongoose";
import { getAllTags } from "../services/tag";
import {
getAllApiKeysByOrganization,
createApiKey,
deleteByOrganizationAndApiKey,
} from "../services/apiKey";
import { getOauth2Client } from "../integrations/GoogleAnalytics";
import { UserModel } from "../models/UserModel";
import { MemberRole, OrganizationInterface } from "../../types/organization";
import {
getWatchedAudits,
findByEntity,
findByEntityParent,
} from "../services/audit";
import { WatchModel } from "../models/WatchModel";
import { ExperimentModel } from "../models/ExperimentModel";
import { QueryModel } from "../models/QueryModel";
import { getMetricsByDatasource } from "../services/experiments";
import { SegmentModel } from "../models/SegmentModel";
import { DimensionModel } from "../models/DimensionModel";
import { IS_CLOUD } from "../util/secrets";
import logger from "../util/logger";
import { sendInviteEmail, sendNewOrgEmail } from "../services/email";
import { DataSourceModel } from "../models/DataSourceModel";
import { GoogleAnalyticsParams } from "../../types/integrations/googleanalytics";
/**
 * Returns the current user's profile plus a summary of every organization
 * they belong to. On cloud installs, lazily creates the user record the
 * first time an authenticated request arrives without a userId.
 */
export async function getUser(req: AuthRequest, res: Response) {
  // Ensure user exists in database
  if (!req.userId) {
    if (IS_CLOUD) {
      // First login on cloud: create the user from the auth token's name/email.
      const user = await createUser(req.name, req.email);
      req.userId = user.id;
    } else {
      // Self-hosted installs require the user to already exist.
      throw new Error("Must be logged in");
    }
  }
  // List of all organizations the user belongs to
  const orgs = await getAllOrganizationsByUserId(req.userId);
  return res.status(200).json({
    status: 200,
    userId: req.userId,
    userName: req.name,
    email: req.email,
    admin: !!req.admin,
    // Per-org summary, including the caller's role within each org.
    organizations: orgs.map((org) => ({
      id: org.id,
      name: org.name,
      subscriptionStatus: org.subscription?.status,
      trialEnd: org.subscription?.trialEnd,
      role: getRole(org, req.userId),
      settings: org.settings || {},
    })),
  });
}
// Lists id/name/email for every member of the current organization.
export async function getUsers(req: AuthRequest, res: Response) {
  let users: { id: string; name: string; email: string }[] = [];
  if (req.organization) {
    const memberIds = req.organization.members.map((m) => m.id);
    const members = await getUsersByIds(memberIds);
    // Expose only the public fields of each user record.
    users = members.map((u) => ({
      id: u.id,
      name: u.name,
      email: u.email,
    }));
  }
  res.status(200).json({ status: 200, users });
}
/**
 * Returns the 50 most recent audit events for experiments the user watches,
 * plus id/name lookup info for the experiments those events reference.
 */
export async function getActivityFeed(req: AuthRequest, res: Response) {
  try {
    const docs = await getWatchedAudits(req.userId, req.organization.id, {
      limit: 50,
    });
    if (!docs.length) {
      // Nothing watched (or no events yet) — return empty lists.
      return res.status(200).json({
        status: 200,
        events: [],
        experiments: [],
      });
    }
    // De-duplicate the experiment ids referenced by the events.
    const experimentIds = Array.from(new Set(docs.map((d) => d.entity.id)));
    // Projection keeps only id/name to keep the payload small.
    const experiments = await ExperimentModel.find(
      {
        id: {
          $in: experimentIds,
        },
      },
      {
        _id: false,
        id: true,
        name: true,
      }
    );
    res.status(200).json({
      status: 200,
      events: docs,
      experiments,
    });
  } catch (e) {
    res.status(400).json({
      status: 400,
      message: e.message,
    });
  }
}
// Returns the ids of experiments the current user is watching.
export async function getWatchedExperiments(req: AuthRequest, res: Response) {
  try {
    const watch = await WatchModel.findOne({
      userId: req.userId,
      organization: req.organization.id,
    });
    const experiments = watch?.experiments || [];
    res.status(200).json({ status: 200, experiments });
  } catch (e) {
    res.status(400).json({ status: 400, message: e.message });
  }
}
// Returns the audit history for an entity (its own events plus events of
// children that reference it as a parent), newest first.
export async function getHistory(req: AuthRequest, res: Response) {
  const { type, id }: { type: string; id: string } = req.params;
  const [direct, byParent] = await Promise.all([
    findByEntity(type, id),
    findByEntityParent(type, id),
  ]);
  const merged = [...direct, ...byParent];
  // Sort newest first.
  merged.sort((a, b) =>
    b.dateCreated > a.dateCreated ? 1 : b.dateCreated < a.dateCreated ? -1 : 0
  );
  // Every event must belong to the caller's organization.
  if (merged.some((e) => e.organization !== req.organization.id)) {
    return res.status(403).json({
      status: 403,
      message: "You do not have access to view history for this",
    });
  }
  res.status(200).json({
    status: 200,
    events: merged,
  });
}
// Updates the display name of the current user.
export async function putUserName(
  req: AuthRequest<{ name: string }>,
  res: Response
) {
  const { name } = req.body;
  try {
    const filter = { id: req.userId };
    const update = { $set: { name } };
    await UserModel.updateOne(filter, update);
    res.status(200).json({ status: 200 });
  } catch (e) {
    res.status(400).json({
      status: 400,
      message: e.message || "An error occurred",
    });
  }
}
// Changes another member's role within the current organization.
// Requires organization-settings permission; users cannot change their own role.
export async function putMemberRole(
  req: AuthRequest<{ role: MemberRole }>,
  res: Response
) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const { role } = req.body;
  const { id }: { id: string } = req.params;
  if (id === req.userId) {
    return res.status(400).json({
      status: 400,
      message: "Cannot change your own role",
    });
  }
  let found = false;
  for (const member of req.organization.members) {
    if (member.id === id) {
      member.role = role;
      found = true;
    }
  }
  if (!found) {
    return res.status(404).json({
      status: 404,
      message: "Cannot find member",
    });
  }
  // In-place mutation of an array element must be flagged so save() persists it.
  req.organization.markModified("members");
  try {
    await req.organization.save();
    return res.status(200).json({ status: 200 });
  } catch (e) {
    return res.status(400).json({
      status: 400,
      message: e.message || "Failed to change role",
    });
  }
}
/**
 * Returns the full settings view of the current organization (invites,
 * members with roles, subscription, appearance settings). Requires the
 * organization-settings permission.
 */
export async function getOrganization(req: AuthRequest, res: Response) {
  if (!req.organization) {
    // Not part of an org yet — not an error, just nothing to show.
    return res.status(200).json({
      status: 200,
      organization: null,
    });
  }
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const {
    invites,
    members,
    ownerEmail,
    name,
    url,
    subscription,
    connections,
    settings,
  } = req.organization;
  // Roles live on the org's member list, user details live in the users
  // collection — build an id→role map so the two can be joined below.
  const roleMapping: Map<string, MemberRole> = new Map();
  members.forEach((m) => {
    roleMapping.set(m.id, m.role);
  });
  const users = await getUsersByIds(members.map((m) => m.id));
  return res.status(200).json({
    status: 200,
    organization: {
      invites,
      ownerEmail,
      name,
      url,
      subscription,
      slackTeam: connections?.slack?.team,
      members: users.map(({ id, email, name }) => {
        return {
          id,
          email,
          name,
          role: roleMapping.get(id),
        };
      }),
      settings,
    },
  });
}
// Accepts an organization invite identified by its key on behalf of the
// current user and returns the joined organization's id.
export async function postInviteAccept(req: AuthRequest, res: Response) {
  const { key } = req.body;
  try {
    const org = await acceptInvite(key, req.userId);
    return res.status(200).json({ status: 200, orgId: org.id });
  } catch (e) {
    return res.status(400).json({ status: 400, message: e.message });
  }
}
export async function postInvite(req: AuthRequest, res: Response) {
if (!req.permissions.organizationSettings) {
return res.status(403).json({
status: 403,
message: "You do not have permission to perform that action.",
});
}
const { email, role } = req.body;
if (!req.organization) {
return res.status(400).json({
status: 400,
message: "Must be part of an organization to invite users",
});
}
const { emailSent, inviteUrl } = await inviteUser(
req.organization,
email,
role
);
return res.status(200).json({
status: 200,
inviteUrl,
emailSent,
});
}
// Request body for the signup endpoint: the company/organization name.
interface SignupBody {
  company: string;
}
/**
 * Removes a member from the current organization. Requires the
 * organization-settings permission; users cannot remove themselves.
 */
export async function deleteMember(req: AuthRequest, res: Response) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const { id }: { id: string } = req.params;
  if (id === req.userId) {
    // Bug fix: message previously said "Cannot change your own role"
    // (copy/paste from putMemberRole).
    return res.status(400).json({
      status: 400,
      message: "Cannot remove yourself from the organization",
    });
  }
  if (!req.organization) {
    return res.status(400).json({
      status: 400,
      message: "Must be part of an organization to remove a member",
    });
  }
  await removeMember(req.organization, id);
  res.status(200).json({
    status: 200,
  });
}
/**
 * Deletes a data source, but only after verifying nothing depends on it
 * (metrics, segments, dimensions must be removed first). Errors are thrown
 * rather than returned as JSON — presumably handled by upstream Express
 * error middleware; verify against the router setup.
 */
export async function deleteDataSource(req: AuthRequest, res: Response) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const { id }: { id: string } = req.params;
  const datasource = await getDataSourceById(id);
  if (!datasource) {
    throw new Error("Cannot find datasource");
  }
  if (datasource.organization !== req.organization?.id) {
    throw new Error("You don't have permission to delete this datasource.");
  }
  // Make sure there are no metrics
  const metrics = await getMetricsByDatasource(datasource.id);
  if (metrics.length > 0) {
    throw new Error(
      "Error: Please delete all metrics tied to this datasource first."
    );
  }
  // Make sure there are no segments
  const segments = await SegmentModel.find({
    datasource: datasource.id,
  });
  if (segments.length > 0) {
    throw new Error(
      "Error: Please delete all segments tied to this datasource first."
    );
  }
  // Make sure there are no dimensions
  const dimensions = await DimensionModel.find({
    datasource: datasource.id,
  });
  if (dimensions.length > 0) {
    throw new Error(
      "Error: Please delete all dimensions tied to this datasource first."
    );
  }
  await DataSourceModel.deleteOne({
    _id: datasource._id,
  });
  res.status(200).json({
    status: 200,
  });
}
export async function postInviteResend(
req: AuthRequest<{ key: string }>,
res: Response
) {
if (!req.permissions.organizationSettings) {
return res.status(403).json({
status: 403,
message: "You do not have permission to perform that action.",
});
}
const { key } = req.body;
if (!req.organization) {
return res.status(400).json({
status: 400,
message: "Must be part of an organization to remove an invitation",
});
}
let emailSent = false;
try {
await sendInviteEmail(req.organization, key);
emailSent = true;
} catch (e) {
emailSent = false;
}
const inviteUrl = getInviteUrl(key);
return res.status(200).json({
status: 200,
inviteUrl,
emailSent,
});
}
// Revokes a pending invitation identified by its key.
export async function deleteInvite(
  req: AuthRequest<{ key: string }>,
  res: Response
) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  if (!req.organization) {
    return res.status(400).json({
      status: 400,
      message: "Must be part of an organization to remove an invitation",
    });
  }
  const { key } = req.body;
  await revokeInvite(req.organization, key);
  res.status(200).json({ status: 200 });
}
/**
 * Creates a new organization for the current user.
 * The new-org notification email is best-effort: a send failure is logged
 * but does not fail the signup.
 */
export async function signup(req: AuthRequest<SignupBody>, res: Response) {
  const { company } = req.body;
  try {
    if (company.length < 3) {
      throw Error("Company length must be at least 3 characters");
    }
    const org = await createOrganization(req.email, req.userId, company, "");
    // Alert the site manager about new organizations that are created
    try {
      await sendNewOrgEmail(company, req.email);
    } catch (e) {
      logger.error("New org email sending failure:");
      logger.error(e.message);
    }
    res.status(200).json({
      status: 200,
      orgId: org.id,
    });
  } catch (e) {
    res.status(400).json({
      status: 400,
      message: e.message || "An error occurred",
    });
  }
}
/**
 * Updates the organization's name and/or appearance settings. Only settings
 * keys explicitly present in the request body are overwritten.
 */
export async function putOrganization(
  req: AuthRequest<Partial<OrganizationInterface>>,
  res: Response
) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const { name, settings } = req.body;
  try {
    if (name) {
      req.organization.set("name", name);
    }
    if (settings) {
      if ("implementationTypes" in settings) {
        req.organization.set(
          "settings.implementationTypes",
          settings.implementationTypes
        );
      }
      if ("customized" in settings) {
        req.organization.set("settings.customized", settings.customized);
      }
      if ("logoPath" in settings) {
        req.organization.set("settings.logoPath", settings.logoPath);
      }
      if ("primaryColor" in settings) {
        req.organization.set("settings.primaryColor", settings.primaryColor);
      }
      if ("secondaryColor" in settings) {
        req.organization.set(
          "settings.secondaryColor",
          settings.secondaryColor
        );
      }
    }
    await req.organization.save();
    res.status(200).json({ status: 200 });
  } catch (e) {
    res.status(400).json({
      status: 400,
      message: e.message || "An error occurred",
    });
  }
}
// Lists the organization's data sources, exposing only non-sensitive
// connection parameters.
export async function getDataSources(req: AuthRequest, res: Response) {
  const datasources = await getDataSourcesByOrganization(req.organization.id);
  if (!datasources || !datasources.length) {
    res.status(200).json({
      status: 200,
      datasources: [],
    });
    return;
  }
  const summaries = datasources.map((d) => {
    const integration = getSourceIntegrationObject(d);
    return {
      id: d.id,
      name: d.name,
      type: d.type,
      settings: d.settings,
      // Credentials and other secrets are filtered out by the integration.
      params: integration.getNonSensitiveParams(),
    };
  });
  res.status(200).json({
    status: 200,
    datasources: summaries,
  });
}
/**
 * Fetch a single data source with non-sensitive connection params.
 * Requires the organization-settings permission.
 */
export async function getDataSource(req: AuthRequest, res: Response) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const { id }: { id: string } = req.params;
  const datasource = await getDataSourceById(id);
  if (!datasource) {
    res.status(404).json({
      status: 404,
      message: "Cannot find data source",
    });
    return;
  }
  if (datasource.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You don't have access to that data source",
    });
    return;
  }
  const integration = getSourceIntegrationObject(datasource);
  res.status(200).json({
    // Consistency fix: every other endpoint includes a status field in the
    // body; this one previously omitted it (additive, backward-compatible).
    status: 200,
    id: datasource.id,
    name: datasource.name,
    type: datasource.type,
    params: integration.getNonSensitiveParams(),
    settings: datasource.settings,
  });
}
// Creates a new data source connection for the organization.
export async function postDataSources(
  req: AuthRequest<{
    name: string;
    type: DataSourceType;
    params: DataSourceParams;
    settings: DataSourceSettings;
  }>,
  res: Response
) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const { name, type, params, settings } = req.body;
  try {
    await createDataSource(req.organization.id, name, type, params, settings);
    res.status(200).json({ status: 200 });
  } catch (e) {
    res.status(400).json({
      status: 400,
      message: e.message || "An error occurred",
    });
  }
}
// Returns all tags defined for the organization.
export async function getTags(req: AuthRequest, res: Response) {
  const tags = await getAllTags(req.organization.id);
  res.status(200).json({ status: 200, tags });
}
/**
 * Updates an existing data source. The type is immutable; if connection
 * params change, they are re-encrypted and the connection is re-validated.
 * Google Analytics refresh tokens are exchanged via OAuth before saving.
 */
export async function putDataSource(
  req: AuthRequest<{
    name: string;
    type: DataSourceType;
    params: DataSourceParams;
    settings: DataSourceSettings;
  }>,
  res: Response
) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const { id }: { id: string } = req.params;
  const { name, type, params, settings } = req.body;
  const datasource = await getDataSourceById(id);
  if (!datasource) {
    res.status(404).json({
      status: 404,
      message: "Cannot find data source",
    });
    return;
  }
  if (datasource.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You don't have access to that data source",
    });
    return;
  }
  if (type !== datasource.type) {
    res.status(400).json({
      status: 400,
      message:
        "Cannot change the type of an existing data source. Create a new one instead.",
    });
    return;
  }
  try {
    datasource.set("name", name);
    datasource.set("dateUpdated", new Date());
    // Bug fix: removed leftover console.log(settings) debug statement that
    // dumped data source settings to the server log.
    datasource.set("settings", settings);
    // GA sends a one-time auth code in the refreshToken field; exchange it
    // for a real refresh token before storing.
    if (
      type === "google_analytics" &&
      (params as GoogleAnalyticsParams).refreshToken
    ) {
      const oauth2Client = getOauth2Client();
      const { tokens } = await oauth2Client.getToken(
        (params as GoogleAnalyticsParams).refreshToken
      );
      (params as GoogleAnalyticsParams).refreshToken = tokens.refresh_token;
    }
    const newParams = mergeAndEncryptParams(params, datasource.params);
    if (newParams !== datasource.params) {
      // If the connection params changed, re-validate the connection
      // If the user is just updating the display name, no need to do this
      datasource.set("params", newParams);
      await testDataSourceConnection(datasource);
    }
    await (datasource as mongoose.Document).save();
    res.status(200).json({
      status: 200,
    });
  } catch (e) {
    console.error(e);
    res.status(400).json({
      status: 400,
      message: e.message || "An error occurred",
    });
  }
}
// Lists all API keys belonging to the organization.
export async function getApiKeys(req: AuthRequest, res: Response) {
  const keys = await getAllApiKeysByOrganization(req.organization.id);
  res.status(200).json({ status: 200, keys });
}
// Creates a new API key (with optional description) for the organization.
export async function postApiKey(
  req: AuthRequest<{ description?: string }>,
  res: Response
) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const { description } = req.body;
  const key = await createApiKey(req.organization.id, description);
  res.status(200).json({ status: 200, key });
}
// Deletes one of the organization's API keys.
export async function deleteApiKey(req: AuthRequest, res: Response) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const { key }: { key: string } = req.params;
  await deleteByOrganizationAndApiKey(req.organization.id, key);
  res.status(200).json({ status: 200 });
}
/**
 * Generates the Google OAuth consent URL used to connect a Google Analytics
 * data source and returns it to the client for redirection.
 */
export async function postGoogleOauthRedirect(req: AuthRequest, res: Response) {
  if (!req.permissions.organizationSettings) {
    return res.status(403).json({
      status: 403,
      message: "You do not have permission to perform that action.",
    });
  }
  const oauth2Client = getOauth2Client();
  const url = oauth2Client.generateAuthUrl({
    // "offline" + "consent" so Google issues a refresh token we can store.
    // eslint-disable-next-line
    access_type: "offline",
    // eslint-disable-next-line
    include_granted_scopes: true,
    prompt: "consent",
    scope: "https://www.googleapis.com/auth/analytics.readonly",
  });
  res.status(200).json({
    status: 200,
    url,
  });
}
/**
 * Looks up a comma-separated list of query ids and returns the documents in
 * the same order they were requested (null for ids that were not found).
 * Queries still marked "running" whose heartbeat is stale are failed.
 */
export async function getQueries(req: AuthRequest, res: Response) {
  const { ids }: { ids: string } = req.params;
  const queries = ids.split(",");
  const docs = await QueryModel.find({
    organization: req.organization.id,
    id: {
      $in: queries,
    },
  });
  // Lookup table so we can return queries in the same order we received them
  const map = new Map();
  const saves: Promise<unknown>[] = [];
  docs.forEach((doc) => {
    // If we haven't gotten a heartbeat in a while, change the status to failed
    if (
      doc.status === "running" &&
      Date.now() - doc.heartbeat.getTime() > 120000
    ) {
      doc.set("status", "failed");
      doc.set("error", "Query aborted");
      // Bug fix: save() was previously fire-and-forget, so failures became
      // unhandled promise rejections; collect and await them instead.
      saves.push(doc.save());
    }
    map.set(doc.id, doc);
  });
  await Promise.all(saves);
  res.status(200).json({
    queries: queries.map((id) => map.get(id) || null),
  });
}

View File

@@ -0,0 +1,216 @@
import { Response } from "express";
import { AuthRequest } from "../types/AuthRequest";
import {
getPresentationById,
getPresentationsByOrganization,
createPresentation,
deletePresentationById,
} from "../services/presentations";
import {
getExperimentsByIds,
getLatestSnapshot,
} from "../services/experiments";
import { getLearningsByExperimentIds } from "../services/learnings";
import { userHasAccess } from "../services/organizations";
import { LearningInterface } from "../../types/insight";
import { ExperimentInterface } from "../../types/experiment";
import { ExperimentSnapshotInterface } from "../../types/experiment-snapshot";
import { PresentationInterface } from "../../types/presentation";
// Lists the organization's presentations, plus (keyed by presentation id)
// the learnings associated with each presentation's experiments.
export async function getPresentations(req: AuthRequest, res: Response) {
  const presentations = await getPresentationsByOrganization(
    req.organization.id
  );
  const learnings: Record<string, LearningInterface[]> = {};
  // Resolve learnings for all presentations concurrently.
  await Promise.all(
    presentations.map(async (p) => {
      if (p.experimentIds) {
        learnings[p.id] = await getLearningsByExperimentIds(p.experimentIds);
      }
    })
  );
  res.status(200).json({
    status: 200,
    presentations,
    learnings,
  });
}
/**
 * Fetch a single presentation with its experiments (each paired with the
 * latest snapshot of its current phase) and the learnings tied to those
 * experiments.
 */
export async function getPresentation(req: AuthRequest, res: Response) {
  const { id }: { id: string } = req.params;
  const pres = await getPresentationById(id);
  if (!pres) {
    // Bug fix: HTTP status was previously sent as 403 while the body said 404.
    res.status(404).json({
      status: 404,
      message: "Presentation not found",
    });
    return;
  }
  if (!(await userHasAccess(req, pres.organization))) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this presentation",
    });
    return;
  }
  // get the experiments to present in this presentations:
  let expIds: string[] = [];
  if (pres.experimentIds) {
    expIds = pres.experimentIds;
  } else {
    // use some other way to find the experiments... perhaps by search query options.
    //TODO
  }
  // Bug fix: previously passed pres.experimentIds (possibly undefined)
  // instead of the normalized expIds computed above.
  const experiments = await getExperimentsByIds(expIds);
  // Pair each experiment with the latest snapshot of its current phase.
  const withSnapshots: {
    experiment: ExperimentInterface;
    snapshot: ExperimentSnapshotInterface;
  }[] = [];
  const promises = experiments.map(async (experiment, i) => {
    const snapshot = await getLatestSnapshot(
      experiment.id,
      experiment.phases.length - 1
    );
    withSnapshots[i] = {
      experiment,
      snapshot,
    };
  });
  await Promise.all(promises);
  // get the learnings associated with these experiments:
  const learnings = await getLearningsByExperimentIds(expIds);
  res.status(200).json({
    status: 200,
    presentation: pres,
    learnings,
    experiments: withSnapshots,
  });
}
/**
 * Delete a presentation by id. Responds 404 when it does not exist and 403
 * when the requester's organization does not own it.
 */
export async function deletePresentation(
  req: AuthRequest<ExperimentInterface>,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const p = await getPresentationById(id);
  if (!p) {
    // Bug fix: HTTP status was previously sent as 403 while the body said 404.
    res.status(404).json({
      status: 404,
      message: "Presentation not found",
    });
    return;
  }
  if (p.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this presentation",
    });
    return;
  }
  // note: we might want to change this to change the status to
  // 'deleted' instead of actually deleting the document.
  const del = await deletePresentationById(p.id);
  res.status(200).json({
    status: 200,
    result: del,
  });
}
/**
 * Creates a new presentation owned by the current user and organization.
 * @param req
 * @param res
 */
export async function postPresentation(
  req: AuthRequest<Partial<PresentationInterface>>,
  res: Response
) {
  const payload = req.body;
  // Stamp ownership server-side so clients cannot spoof it.
  payload.organization = req.organization.id;
  payload.userId = req.userId;
  const presentation = await createPresentation(payload);
  res.status(200).json({ status: 200, presentation });
}
/**
 * Update a presentation. Title/description are only written when changed;
 * array/object fields (experimentIds, options) are always re-saved since a
 * simple equality check cannot detect changes inside them.
 * @param req
 * @param res
 */
export async function updatePresentation(
  req: AuthRequest<PresentationInterface>,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const data = req.body;
  const p = await getPresentationById(id);
  if (!p) {
    // Bug fix: HTTP status was previously sent as 403 while the body said 404.
    res.status(404).json({
      status: 404,
      message: "Presentation not found",
    });
    return;
  }
  if (p.organization !== req.organization.id) {
    res.status(403).json({
      status: 403,
      message: "You do not have access to this presentation",
    });
    return;
  }
  try {
    if (data["title"] !== p["title"]) p.set("title", data["title"]);
    if (data["description"] !== p["description"])
      p.set("description", data["description"]);
    p.set("experimentIds", data["experimentIds"]);
    p.set("options", data["options"]);
    p.set("dateUpdated", new Date());
    await p.save();
    res.status(200).json({
      status: 200,
      presentation: p,
    });
  } catch (e) {
    // Bug fix: removed leftover console.log("caught error...") debug line.
    console.error(e);
    res.status(400).json({
      status: 400,
      message: e.message || "An error occurred",
    });
  }
}

View File

@@ -0,0 +1,97 @@
/*
import {Response} from "express";
import {AuthRequest} from "../types/AuthRequest";
import {ReportInterface} from "../models/ReportModel";
import {getAllReportsByOrganization, createReport, getReportById, runReport} from "../services/reports";
export async function getReports(req: AuthRequest, res: Response) {
const reports = await getAllReportsByOrganization(req.organization.id);
res.status(200).json({
status: 200,
reports
});
}
export async function getReport(req: AuthRequest, res: Response) {
const {id}: {id: string} = req.params;
const report = await getReportById(id);
if (!report) {
res.status(404).json({
status: 404,
message: "Report not found"
});
return;
}
if (report.organization !== req.organization.id) {
res.status(401).json({
status: 401,
message: "You don't have access to view this report"
});
return;
}
try {
const results = await runReport(id, true);
res.status(200).json({
status: 200,
report,
results,
});
}
catch (e) {
res.status(200).json({
status: 200,
report,
results: [],
error: e.message,
});
}
}
export async function postReports(req: AuthRequest, res: Response) {
const report = await createReport(req.organization.id);
res.status(200).json({
status: 200,
report: report.id
});
}
export async function putReport(req: AuthRequest<ReportInterface>, res: Response) {
const {id}: {id: string} = req.params;
const data = req.body;
const report = await getReportById(id);
if (!report) {
res.status(404).json({
status: 404,
message: "Report not found"
});
return;
}
if (report.organization !== req.organization.id) {
res.status(401).json({
status: 401,
message: "You don't have access to view this report"
});
return;
}
const allowedKeys = ["title", "description", "queries"];
allowedKeys.forEach((k: keyof ReportInterface) => {
if (k in data && data[k] !== report[k]) {
report.set(k, data[k]);
}
});
await report.save();
res.status(200).json({
status: 200
});
}
*/

View File

@@ -0,0 +1,469 @@
import { AuthRequest } from "../types/AuthRequest";
import { Response } from "express";
import {
SegmentComparisonModel,
SegmentComparisonDocument,
} from "../models/SegmentComparisonModel";
import { userHasAccess } from "../services/organizations";
import uniqid from "uniqid";
import {
getSourceIntegrationObject,
getDataSourceById,
} from "../services/datasource";
import {
countABTest,
binomialABTest,
ABTestStats,
bootstrapABTest,
getValueCR,
} from "../services/stats";
import { getMetricsByDatasource } from "../services/experiments";
import {
QueryMap,
getUsers,
getStatusEndpoint,
getMetricValue,
startRun,
cancelRun,
} from "../services/queries";
import { QueryDocument } from "../models/QueryModel";
import {
MetricValueResult,
UsersResult,
UsersQueryParams,
MetricStats,
} from "../types/Integration";
import { SegmentModel, SegmentDocument } from "../models/SegmentModel";
import { SegmentInterface } from "../../types/segment";
import {
SegmentComparisonInterface,
SegmentComparisonResults,
} from "../../types/segment-comparison";
// Lists all segments belonging to the organization.
export async function getAllSegments(req: AuthRequest, res: Response) {
  const segments = await SegmentModel.find({
    organization: req.organization.id,
  });
  res.status(200).json({ status: 200, segments });
}
/**
 * Creates a new segment after verifying the referenced data source belongs
 * to the organization. Invalid data sources throw — presumably handled by
 * upstream Express error middleware; verify against the router setup.
 */
export async function postSegments(
  req: AuthRequest<Partial<SegmentInterface>>,
  res: Response
) {
  const { datasource, name, sql, targeting } = req.body;
  // Prevent attaching a segment to another org's data source.
  const datasourceDoc = await getDataSourceById(datasource);
  if (!datasourceDoc || datasourceDoc.organization !== req.organization.id) {
    throw new Error("Invalid data source");
  }
  const doc = await SegmentModel.create({
    datasource,
    name,
    sql,
    targeting,
    id: uniqid("seg_"),
    dateCreated: new Date(),
    dateUpdated: new Date(),
    organization: req.organization.id,
  });
  res.status(200).json({
    status: 200,
    segment: doc,
  });
}
/**
 * Updates an existing segment. Both the segment and the referenced data
 * source must belong to the caller's organization; violations throw —
 * presumably handled by upstream Express error middleware.
 */
export async function putSegment(
  req: AuthRequest<Partial<SegmentInterface>>,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const segment = await SegmentModel.findOne({
    id,
  });
  if (!segment) {
    throw new Error("Could not find segment");
  }
  if (segment.organization !== req.organization.id) {
    throw new Error("You don't have access to that segment");
  }
  const { datasource, name, sql, targeting } = req.body;
  // Prevent re-pointing the segment at another org's data source.
  const datasourceDoc = await getDataSourceById(datasource);
  if (!datasourceDoc || datasourceDoc.organization !== req.organization.id) {
    throw new Error("Invalid data source");
  }
  segment.set("datasource", datasource);
  segment.set("name", name);
  segment.set("sql", sql);
  segment.set("targeting", targeting);
  segment.set("dateUpdated", new Date());
  await segment.save();
  res.status(200).json({
    status: 200,
    segment,
  });
}
// Lists the organization's segment comparisons. Results and queries are
// excluded from the projection because they are very large.
export async function getAllSegmentComparisons(
  req: AuthRequest,
  res: Response
) {
  const filter = { organization: req.organization.id };
  const projection = { results: false, queries: false };
  const comparisons = await SegmentComparisonModel.find(filter, projection);
  res.status(200).json({
    status: 200,
    comparisons: comparisons || [],
  });
}
/**
 * Fetch one segment comparison (including its results) by id.
 */
export async function getSegmentComparison(req: AuthRequest, res: Response) {
  const { id }: { id: string } = req.params;
  const comparison = await SegmentComparisonModel.findOne({
    id,
  });
  if (!comparison) {
    return res.status(404).json({
      status: 404,
      message: "Could not find segment comparison",
    });
  }
  // Bug fix: userHasAccess returns a Promise and was not awaited, so the
  // negated check was always false and the access control was a no-op
  // (compare the awaited usage elsewhere in this codebase).
  if (!(await userHasAccess(req, comparison.organization))) {
    return res.status(403).json({
      status: 403,
      message: "You do not have access to this segment comparison",
    });
  }
  res.status(200).json({
    status: 200,
    comparison,
  });
}
/**
 * Combine raw query results (user counts + per-metric values for each
 * segment) into per-metric A/B test statistics for a segment comparison.
 *
 * @param doc  The comparison being processed (supplies datasource + metric ids)
 * @param data Map of query keys ("users_segment1", "<metricId>_segment2", ...)
 *             to their finished query documents
 * @returns    Aggregated user counts and per-metric stats
 * @throws     Error for metric types with no supported statistical test
 */
async function processResults(
  doc: SegmentComparisonDocument,
  data: QueryMap
): Promise<SegmentComparisonResults> {
  const segment1Users: UsersResult = data.get("users_segment1")
    ?.result as UsersResult;
  const segment2Users: UsersResult = data.get("users_segment2")
    ?.result as UsersResult;
  const results: SegmentComparisonResults = {
    users: {
      segment1: segment1Users?.users || 0,
      segment2: segment2Users?.users || 0,
    },
    metrics: {},
  };
  // Stats for each metric
  const metrics = await getMetricsByDatasource(doc.datasource);
  const selectedMetrics = metrics.filter((m) => doc.metrics.includes(m.id));
  selectedMetrics.forEach((m) => {
    const segment1Result: MetricValueResult = data.get(`${m.id}_segment1`)
      ?.result;
    const segment2Result: MetricValueResult = data.get(`${m.id}_segment2`)
      ?.result;
    // TODO: support calculating total from dates
    const v1Stats: MetricStats = {
      count: segment1Result?.count || 0,
      mean: segment1Result?.mean || 0,
      stddev: segment1Result?.stddev || 0,
    };
    const v2Stats: MetricStats = {
      count: segment2Result?.count || 0,
      mean: segment2Result?.mean || 0,
      stddev: segment2Result?.stddev || 0,
    };
    // Metric totals for each segment
    const v1 = v1Stats.mean * v1Stats.count;
    const v2 = v2Stats.mean * v2Stats.count;
    let stats: ABTestStats;
    if (!v1 || !v2 || !results.users.segment1 || !results.users.segment2) {
      // Missing data on either side: emit empty stats instead of running a test
      stats = {
        buckets: [],
        chanceToWin: 0,
        ci: [0, 0],
        expected: 0,
      };
    } else if (m.type === "duration" || m.type === "revenue") {
      // Continuous metrics use a bootstrap test
      // (was two identical branches for duration and revenue)
      stats = bootstrapABTest(
        v1Stats,
        results.users.segment1,
        v2Stats,
        results.users.segment2,
        m.ignoreNulls
      );
    } else if (m.type === "count") {
      stats = countABTest(
        v1,
        results.users.segment1,
        v2,
        results.users.segment2
      );
    } else if (m.type === "binomial") {
      // Binomial test takes successes and failures per segment
      stats = binomialABTest(
        v1,
        results.users.segment1 - v1,
        v2,
        results.users.segment2 - v2
      );
    } else {
      throw new Error("Unsupported metric type: " + m.type);
    }
    // For inverse metrics, lower is better, so flip the probability
    if (m.inverse) {
      stats.chanceToWin = 1 - stats.chanceToWin;
    }
    results.metrics[m.id] = {
      segment1: getValueCR(m, v1, v1Stats.count, results.users.segment1),
      segment2: {
        ...getValueCR(m, v2, v2Stats.count, results.users.segment2),
        ...stats,
      },
    };
  });
  return results;
}
/**
 * Poll the status of a comparison's running queries.
 * The lookup is scoped to the requester's organization (consistent with
 * cancelSegmentComparison) so users cannot poll other organizations'
 * comparisons, and a missing document returns 404 instead of crashing.
 */
export async function getSegmentComparisonStatus(
  req: AuthRequest,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const comparison = await SegmentComparisonModel.findOne({
    id,
    organization: req.organization.id,
  });
  if (!comparison) {
    return res.status(404).json({
      status: 404,
      message: "Could not find segment comparison",
    });
  }
  const result = await getStatusEndpoint(
    comparison,
    req.organization.id,
    "results",
    (data) => processResults(comparison, data)
  );
  return res.status(200).json(result);
}
/**
 * Cancel any running queries for a comparison owned by the requester's
 * organization. Guards against a missing document instead of passing
 * null straight into cancelRun.
 */
export async function cancelSegmentComparison(req: AuthRequest, res: Response) {
  const { id }: { id: string } = req.params;
  const comparison = await SegmentComparisonModel.findOne({
    id,
    organization: req.organization.id,
  });
  if (!comparison) {
    return res.status(404).json({
      status: 404,
      message: "Could not find segment comparison",
    });
  }
  res.status(200).json(await cancelRun(comparison, req.organization.id));
}
// Coerce a date coming from the API (usually an ISO string) into a Date.
// NOTE(review): `new Date(null)` yields the Unix epoch and
// `new Date(undefined)` yields an Invalid Date — callers pass optional
// fields through here, so the lenient coercion is preserved; confirm
// whether missing dates should instead be rejected upstream.
function parseApiDate(date: string | Date | null | undefined): Date {
  return new Date(date);
}
// Decide whether a comparison update requires re-running the analysis
// queries. Returns true only when every required input is present:
// a datasource, both segments with usable date ranges, and at least one
// metric. Cosmetic changes (e.g. title-only edits) return false.
function needsNewResults(data: Partial<SegmentComparisonInterface>) {
  const hasRequiredSelections =
    !!data.datasource && !!data.segment1?.segment && !!data.segment2?.segment;
  if (!hasRequiredSelections) return false;

  // Segment 1 always needs an explicit date range
  if (!data.segment1?.from || !data.segment1?.to) return false;

  // Segment 2 either shares segment 1's range or needs its own
  if (!data.segment2?.sameDateRange) {
    if (!data.segment2?.from || !data.segment2?.to) return false;
  }

  // At least one metric must be selected
  return !!(data.metrics && data.metrics.length);
}
/**
 * Update a segment comparison and, when the analysis inputs changed,
 * launch the queries needed to recalculate results.
 *
 * Fixes: findOne can return null for an unknown id (was dereferenced
 * unchecked), and segment date coercion is now guarded since the body
 * is a Partial and either segment may be omitted.
 */
export async function putSegmentComparison(
  req: AuthRequest<Partial<SegmentComparisonInterface>>,
  res: Response
) {
  const { id }: { id: string } = req.params;
  const data = req.body;
  // Turn dates into actual Date objects (guard each segment: the body is
  // a Partial and may omit them entirely)
  if (data.segment1) {
    data.segment1.from = parseApiDate(data.segment1.from);
    data.segment1.to = parseApiDate(data.segment1.to);
  }
  if (data.segment2) {
    data.segment2.from = parseApiDate(data.segment2.from);
    data.segment2.to = parseApiDate(data.segment2.to);
  }
  const comparison = await SegmentComparisonModel.findOne({
    id,
  });
  if (!comparison) {
    return res.status(404).json({
      status: 404,
      message: "Could not find segment comparison",
    });
  }
  if (comparison.organization !== req.organization.id) {
    return res.status(403).json({
      status: 403,
      message: "You do not have access to this segment comparison",
    });
  }
  // Only recalculate results if something meaningful changed (e.g. the sql queries)
  // Avoids recalculating on things like title changes
  const recalculate = needsNewResults(data);
  comparison.set("title", data.title);
  comparison.set("datasource", data.datasource);
  comparison.set("metrics", data.metrics);
  comparison.set("conversionWindow", data.conversionWindow);
  comparison.set("segment1", data.segment1);
  comparison.set("segment2", data.segment2);
  comparison.set("dateUpdated", new Date());
  // Calculate results and update
  if (recalculate) {
    // Datasource must exist and belong to this organization
    const datasource = await getDataSourceById(comparison.datasource);
    if (!datasource || datasource.organization !== req.organization.id) {
      return res.status(403).json({
        status: 403,
        message: "You do not have access to that datasource",
      });
    }
    const integration = getSourceIntegrationObject(datasource);
    // Both selected segments must exist on this datasource/organization
    const segments = await SegmentModel.find({
      organization: req.organization.id,
      datasource: datasource.id,
      id: {
        $in: [data.segment1.segment, data.segment2.segment],
      },
    });
    let segment1: SegmentDocument, segment2: SegmentDocument;
    segments.forEach((segment) => {
      if (segment.id === data.segment1.segment) {
        segment1 = segment;
      }
      if (segment.id === data.segment2.segment) {
        segment2 = segment;
      }
    });
    if (!segment1 || !segment2) {
      throw new Error("Invalid segment selected");
    }
    // Queries to run, keyed by the names processResults expects
    const promises: Record<string, Promise<QueryDocument>> = {};
    const segment1Params: UsersQueryParams = {
      from: data.segment1.from,
      name: segment1.name,
      segmentQuery: segment1.sql,
      segmentName: segment1.name,
      userIdType: "user",
      conversionWindow: comparison.conversionWindow,
      to: data.segment1.to,
    };
    // Segment 2 can reuse segment 1's date range when sameDateRange is set
    const segment2Params: UsersQueryParams = {
      from: data.segment2.sameDateRange
        ? data.segment1.from
        : data.segment2.from,
      name: segment2.name,
      segmentQuery: segment2.sql,
      segmentName: segment2.name,
      userIdType: "user",
      conversionWindow: comparison.conversionWindow,
      to: data.segment2.sameDateRange ? data.segment1.to : data.segment2.to,
    };
    // User counts for both segments
    promises["users_segment1"] = getUsers(integration, segment1Params);
    promises["users_segment2"] = getUsers(integration, segment2Params);
    // Metric values
    const metrics = await getMetricsByDatasource(comparison.datasource);
    const selectedMetrics = metrics.filter((m) =>
      comparison.metrics.includes(m.id)
    );
    selectedMetrics.forEach((metric) => {
      promises[`${metric.id}_segment1`] = getMetricValue(integration, {
        metric,
        ...segment1Params,
      });
      promises[`${metric.id}_segment2`] = getMetricValue(integration, {
        metric,
        ...segment2Params,
      });
    });
    comparison.set("runStarted", new Date());
    const { queries, result } = await startRun(promises, (data) =>
      processResults(comparison, data)
    );
    comparison.set("queries", queries);
    if (result) {
      comparison.set("results", result);
    }
  }
  await comparison.save();
  return res.status(200).json({
    status: 200,
    comparison: comparison,
  });
}
export async function postSegmentComparisons(req: AuthRequest, res: Response) {
  // Create a fresh, empty comparison with sensible defaults and return its id.
  const defaultRange = () => ({ from: new Date(), to: new Date() });
  const doc: SegmentComparisonInterface = {
    id: uniqid("sc_"),
    title: "New Comparison",
    segment1: { segment: "", ...defaultRange() },
    // Segment 2 defaults to sharing segment 1's date range
    segment2: { segment: "", ...defaultRange(), sameDateRange: true },
    datasource: null,
    metrics: [],
    conversionWindow: 3,
    queries: [],
    results: null,
    organization: req.organization.id,
    dateCreated: new Date(),
    dateUpdated: new Date(),
    runStarted: null,
  };
  const created = await SegmentComparisonModel.create(doc);
  res.status(200).json({
    status: 200,
    id: created.id,
  });
}

View File

@@ -0,0 +1,44 @@
import { Request, Response } from "express";
import { createIdea } from "../services/ideas";
import {
formatTextResponse,
getOrganizationFromSlackTeam,
getUserInfoBySlackId,
} from "../services/slack";
import { APP_ORIGIN } from "../util/secrets";
// Slack slash-command handler: creates an idea from "/idea <text>".
// All failures are reported back to Slack as a formatted text message.
export async function postIdeas(req: Request, res: Response) {
  try {
    const organization = await getOrganizationFromSlackTeam(req.body.team_id);
    const userInfo = await getUserInfoBySlackId(req.body.user_id, organization);
    const text: string = req.body.text;
    if (text.length < 3) {
      throw new Error(
        "Idea cannot be empty. Example usage: `/idea this is my cool idea`"
      );
    }
    const idea = await createIdea({
      text,
      source: "slack",
      details: "",
      userId: userInfo.id,
      // Fall back to the raw Slack username if we have no mapped name
      userName: userInfo.name || req.body.user_name,
      organization: organization.id,
      tags: [],
      votes: [],
    });
    res.json(
      formatTextResponse(`Idea created! <${APP_ORIGIN}/idea/${idea.id}>`)
    );
  } catch (e) {
    res.json(formatTextResponse(`*Error:* ${e.message}`));
  }
}

View File

@@ -0,0 +1,193 @@
import { Request, Response } from "express";
import {
STRIPE_SECRET,
APP_ORIGIN,
STRIPE_PRICE,
STRIPE_WEBHOOK_SECRET,
STRIPE_DEFAULT_COUPON,
} from "../util/secrets";
import { Stripe } from "stripe";
import { AuthRequest } from "../types/AuthRequest";
import { OrganizationModel } from "../models/OrganizationModel";
import { createOrganization } from "../services/organizations";
const stripe = new Stripe(STRIPE_SECRET, { apiVersion: "2020-08-27" });
// Sync a Stripe subscription's key fields onto the owning organization
// (matched by stripeCustomerId). Accepts either a subscription id or a
// full subscription object.
async function updateSubscription(subscription: string | Stripe.Subscription) {
  // Make sure we have the full subscription object
  const sub =
    typeof subscription === "string"
      ? await stripe.subscriptions.retrieve(subscription)
      : subscription;

  const stripeCustomerId =
    typeof sub.customer === "string" ? sub.customer : sub.customer.id;

  // trial_end is a unix timestamp in seconds; store it as a Date (or null)
  const trialEnd = sub.trial_end ? new Date(sub.trial_end * 1000) : null;

  await OrganizationModel.updateOne(
    { stripeCustomerId },
    {
      $set: {
        subscription: {
          id: sub.id,
          qty: sub.items.data[0].quantity,
          trialEnd,
          status: sub.status,
        },
      },
    }
  );
}
/**
 * Start a free trial subscription for the user's organization.
 *
 * Creates the organization and/or the Stripe customer on the fly when
 * missing, then starts a trial subscription with no payment method and
 * mirrors it into Mongo. Idempotent: returns immediately when a
 * subscription id already exists.
 *
 * Body: { qty: seat quantity, name: company name (used only when a new
 * organization must be created) }
 */
export async function postStartTrial(
  req: AuthRequest<{ qty: number; name: string }>,
  res: Response
) {
  const { qty, name } = req.body;
  // If user already has a subscription, return immediately
  if (req.organization?.subscription?.id) {
    return res.status(200).json({
      status: 200,
    });
  }
  try {
    // Create organization first if needed
    // (note: mutates req.organization so the steps below can rely on it)
    if (!req.organization) {
      if (name.length < 3) {
        throw new Error("Company name must be at least 3 characters long");
      }
      req.organization = await createOrganization(
        req.email,
        req.userId,
        name,
        ""
      );
    }
    // Create customer in Stripe if not exists
    if (!req.organization.stripeCustomerId) {
      const resp = await stripe.customers.create({
        email: req.email,
        name: req.name,
        metadata: {
          user: req.userId,
          organization: req.organization.id,
        },
      });
      // Persist the new customer id both in memory and in Mongo
      req.organization.stripeCustomerId = resp.id;
      await OrganizationModel.updateOne(
        {
          id: req.organization.id,
        },
        {
          $set: {
            stripeCustomerId: resp.id,
          },
        }
      );
    }
    // Start subscription trial without payment method
    const subscription = await stripe.subscriptions.create({
      customer: req.organization.stripeCustomerId,
      coupon: STRIPE_DEFAULT_COUPON,
      collection_method: "charge_automatically",
      trial_from_plan: true,
      metadata: {
        user: req.userId,
        organization: req.organization.id,
      },
      items: [
        {
          price: STRIPE_PRICE,
          quantity: qty,
        },
      ],
    });
    // Save in Mongo
    await updateSubscription(subscription);
    res.status(200).json({ status: 200 });
  } catch (e) {
    res.status(400).json({
      status: 400,
      message: e.message,
    });
  }
}
/**
 * Create a Stripe billing-portal session for the organization and return
 * its URL. Restricted to users with the organizationSettings permission.
 *
 * Bug fix: the 403 response previously lacked a `return`, so the billing
 * session was still created for non-admins and a second response was
 * attempted on the same request.
 */
export async function postCreateBillingSession(
  req: AuthRequest,
  res: Response
) {
  try {
    if (!req.permissions?.organizationSettings) {
      return res.status(403).json({
        status: 403,
        message: "Only admins can view and modify billing settings",
      });
    }
    const { url } = await stripe.billingPortal.sessions.create({
      customer: req.organization.stripeCustomerId,
      return_url: `${APP_ORIGIN}/settings`,
    });
    res.status(200).json({
      status: 200,
      url,
    });
  } catch (e) {
    res.status(400).json({
      status: 400,
      message: e.message,
    });
  }
}
/**
 * Stripe webhook endpoint. Verifies the event signature against the raw
 * request body, then mirrors subscription changes into Mongo.
 *
 * Fix: updateSubscription is now awaited so the 200 "Ok" acknowledgement
 * is only sent after the database write completes; previously the promise
 * was fired and forgotten, so failures became unhandled rejections and
 * Stripe never retried the event.
 */
export async function postWebhook(req: Request, res: Response) {
  // req.body must be the raw Buffer for signature verification to work
  const payload: Buffer = req.body;
  const sig = req.headers["stripe-signature"];
  let event;
  try {
    event = stripe.webhooks.constructEvent(payload, sig, STRIPE_WEBHOOK_SECRET);
  } catch (err) {
    console.error(payload, sig);
    console.error(err);
    return res.status(400).send(`Webhook Error: ${err.message}`);
  }
  switch (event.type) {
    case "checkout.session.completed": {
      const { subscription } = event.data
        .object as Stripe.Response<Stripe.Checkout.Session>;
      await updateSubscription(subscription);
      break;
    }
    case "invoice.paid":
    case "invoice.payment_failed": {
      const { subscription } = event.data
        .object as Stripe.Response<Stripe.Invoice>;
      await updateSubscription(subscription);
      break;
    }
    case "customer.subscription.deleted":
    case "customer.subscription.updated": {
      const subscription = event.data
        .object as Stripe.Response<Stripe.Subscription>;
      await updateSubscription(subscription);
      break;
    }
    // Other event types are acknowledged without action
  }
  res.status(200).send("Ok");
}

View File

@@ -0,0 +1,189 @@
import { init } from "./app";
import { ExperimentModel } from "./models/ExperimentModel";
import {
createSnapshot,
getExperimentWatchers,
getLatestSnapshot,
getMetricById,
} from "./services/experiments";
import { getDataSourceById } from "./services/datasource";
import pino from "pino";
import { isEmailEnabled, sendExperimentChangesEmail } from "./services/email";
// Maximum number of experiments refreshed per cron run
const MAX_UPDATES = 10;
// Only refresh experiments whose last snapshot attempt is older than this
// many minutes
const UPDATE_FREQUENCY = 360;
const parentLogger = pino();
// All log lines from this process are tagged as coming from the cron
const logger = parentLogger.child({
  cron: true,
});
// TODO: await this
init();
logger.info("Cron started");
// Time out after 30 minutes
const timer = setTimeout(() => {
  logger.warn("Cron Timeout");
  process.exit(1);
}, 30 * 60 * 1000);
// Main cron body: pick the stalest running experiments with auto-snapshots
// enabled, refresh each one's snapshot, diff it against the previous
// snapshot, and email watchers about significant chance-to-win changes.
(async () => {
  // Cutoff: only experiments not attempted within the last UPDATE_FREQUENCY minutes
  const latestDate = new Date();
  latestDate.setMinutes(latestDate.getMinutes() - UPDATE_FREQUENCY);
  const experiments = await ExperimentModel.find({
    datasource: {
      $exists: true,
      $ne: "",
    },
    status: "running",
    autoSnapshots: true,
    lastSnapshotAttempt: {
      $lte: latestDate,
    },
  })
    .limit(MAX_UPDATES)
    .sort({
      lastSnapshotAttempt: 1,
    });
  // All experiments are processed concurrently; each one handles its own errors
  const promises = experiments.map(async (experiment) => {
    try {
      logger.info({ experiment: experiment.id }, "Updating experiment - Start");
      const datasource = await getDataSourceById(experiment.datasource);
      // NOTE(review): lastSnapshot is dereferenced below without a null
      // check — an experiment with no prior snapshot would throw and be
      // caught by the outer catch (disabling autoSnapshots); confirm intended
      const lastSnapshot = await getLatestSnapshot(
        experiment.id,
        experiment.phases.length - 1
      );
      const currentSnapshot = await createSnapshot(
        experiment,
        experiment.phases.length - 1,
        datasource
      );
      logger.info(
        { experiment: experiment.id },
        "Updating experiment - Success"
      );
      // check this and the previous snapshot to see if anything changed:
      // assumptions:
      // - that result[0] in the current snapshot is what we care about
      // - that result[0] in the last snapshot is the same (could add a check for this)
      const experimentChanges: string[] = [];
      // Variation 0 is the baseline, so comparisons start at index 1
      for (let i = 1; i < currentSnapshot.results[0].variations.length; i++) {
        const curVar = currentSnapshot.results[0].variations[i];
        const lastVar = lastSnapshot.results[0].variations[i];
        for (const m in curVar.metrics) {
          // sanity checks: metric existed last time and has enough volume
          if (
            lastVar.metrics[m] &&
            lastVar.metrics[m].chanceToWin &&
            curVar.metrics[m].value > 150
          ) {
            // checks to see if anything changed:
            if (
              curVar.metrics[m].chanceToWin > 0.95 &&
              lastVar.metrics[m].chanceToWin < 0.95
            ) {
              // this test variation has gone significant, and won
              // NOTE(review): getMetricById(m) is concatenated directly into
              // the message — if it returns a Promise or metric object (not a
              // string) this will render as "[object ...]"; confirm/await and
              // use the metric's name instead
              experimentChanges.push(
                "The metric " +
                  getMetricById(m) +
                  " for variation " +
                  experiment.variations[i].name +
                  " has reached a " +
                  (curVar.metrics[m].chanceToWin * 100).toFixed(1) +
                  "% chance to beat baseline"
              );
            } else if (
              /* else if(curVar.metrics[m].chanceToWin < 0.85 && lastVar.metrics[m].chanceToWin > 0.95) {
              // this test variation was significant, but is now not.
              experimentChanges.push(
                "The metric "+getMetricById(m)+" is no longer a significant improvement for variation "+experiment.variations[i].name+" ("+lastVar.metrics[m].chanceToWin.toFixed(3)+" to "+ curVar.metrics[m].chanceToWin.toFixed(3)+")"
              );
            } */
              curVar.metrics[m].chanceToWin < 0.05 &&
              lastVar.metrics[m].chanceToWin > 0.05
            ) {
              // this test variation has gone significant, and lost
              experimentChanges.push(
                "The metric " +
                  getMetricById(m) +
                  " for variation " +
                  experiment.variations[i].name +
                  " has dropped to a " +
                  (curVar.metrics[m].chanceToWin * 100).toFixed(1) +
                  " chance to beat the baseline"
              );
            }
            /*
            else if(curVar.metrics[m].chanceToWin > 0.15 && lastVar.metrics[m].chanceToWin < 0.05) {
              // this test was significant, and lost, but now hasn't.
              experimentChanges.push(
                "The metric "+getMetricById(m)+" is no longer significant for variation "+experiment.variations[i].name+" ("+lastVar.metrics[m].chanceToWin.toFixed(3)+" to "+ curVar.metrics[m].chanceToWin.toFixed(3)+")"
              );
            }
            */
          }
        }
      }
      if (experimentChanges.length) {
        // send an email to any subscribers on this test:
        logger.info(
          { experiment: experiment.id },
          "Significant change - detected " +
            experimentChanges.length +
            " significant changes"
        );
        if (!isEmailEnabled()) {
          logger.error(
            { experiment: experiment.id },
            "Significant change - not sending as email not enabled"
          );
        } else {
          const watchers = await getExperimentWatchers(experiment.id);
          const userIds = watchers.map((w) => w.userId);
          try {
            await sendExperimentChangesEmail(
              userIds,
              experiment.id,
              experiment.name,
              experimentChanges
            );
          } catch (e) {
            logger.error(
              { experiment: experiment.id },
              "Significant change - Email sending failure:"
            );
            logger.error({ experiment: experiment.id }, e.message);
          }
        }
      }
    } catch (e) {
      logger.error(
        { experiment: experiment.id },
        "Updating experiment - Failure"
      );
      // A failed refresh disables auto-snapshots so the cron doesn't
      // retry a broken experiment forever
      try {
        experiment.autoSnapshots = false;
        experiment.markModified("autoSnapshots");
        await experiment.save();
        // TODO: email user and let them know it failed
      } catch (e) {
        logger.error({ experiment: experiment.id }, e.message);
      }
    }
  });
  await Promise.all(promises);
  logger.info("Cron finished");
  clearTimeout(timer);
  process.exit(0);
})();

View File

@@ -0,0 +1,14 @@
import { MONGODB_URI } from "../util/secrets";
import mongoose from "mongoose";
import bluebird from "bluebird";
mongoose.Promise = bluebird;
// Open the MongoDB connection used by the app. The options opt in to the
// new URL parser, index creation behavior, and unified topology engine.
export default async () => {
  const options = {
    useNewUrlParser: true,
    useCreateIndex: true,
    useUnifiedTopology: true,
  };
  const connection = await mongoose.connect(MONGODB_URI, options);
  return connection;
};

View File

@@ -0,0 +1,38 @@
import { decryptDataSourceParams } from "../services/datasource";
import { runAthenaQuery } from "../services/athena";
import SqlIntegration from "./SqlIntegration";
import { AthenaConnectionParams } from "../../types/integrations/athena";
/**
 * AWS Athena integration. Generic SQL generation lives in SqlIntegration;
 * this subclass supplies the Athena/Presto-specific SQL fragments and
 * query execution.
 */
export default class Athena extends SqlIntegration {
  params: AthenaConnectionParams;
  setParams(encryptedParams: string) {
    // Connection params are stored encrypted at rest
    this.params = decryptDataSourceParams<AthenaConnectionParams>(
      encryptedParams
    );
  }
  getNonSensitiveParams(): Partial<AthenaConnectionParams> {
    // Blank out the AWS credentials before params leave the server
    return {
      ...this.params,
      accessKeyId: undefined,
      secretAccessKey: undefined,
    };
  }
  runQuery(sql: string) {
    return runAthenaQuery(this.params, sql);
  }
  toTimestamp(date: Date) {
    return "from_iso8601_timestamp('" + date.toISOString() + "')";
  }
  addDateInterval(col: string, days: number) {
    return col + " + INTERVAL '" + days + "' day";
  }
  subtractHalfHour(col: string) {
    return col + " - INTERVAL '30' minute";
  }
  regexMatch(col: string, regex: string) {
    return "regexp_like(" + col + ", '" + regex + "')";
  }
  percentile(col: string, percentile: number) {
    return "approx_percentile(" + col + ", " + percentile + ")";
  }
}

View File

@@ -0,0 +1,60 @@
import { decryptDataSourceParams } from "../services/datasource";
import * as bq from "@google-cloud/bigquery";
import SqlIntegration from "./SqlIntegration";
import { BigQueryConnectionParams } from "../../types/integrations/bigquery";
/**
 * Google BigQuery integration. Inherits generic SQL generation from
 * SqlIntegration and overrides the BigQuery-specific dialect pieces
 * plus query execution.
 */
export default class BigQuery extends SqlIntegration {
  params: BigQueryConnectionParams;
  setParams(encryptedParams: string) {
    this.params = decryptDataSourceParams<BigQueryConnectionParams>(
      encryptedParams
    );
  }
  getNonSensitiveParams(): Partial<BigQueryConnectionParams> {
    // TODO: remove sensitive params
    // Strip the service-account private key before returning params
    return {
      ...this.params,
      privateKey: undefined,
    };
  }
  async runQuery(sql: string) {
    // A fresh client per query; credentials come from the stored params
    const client = new bq.BigQuery({
      projectId: this.params.projectId,
      credentials: {
        client_email: this.params.clientEmail,
        private_key: this.params.privateKey,
      },
    });
    const [job] = await client.createQueryJob({
      query: sql,
      useLegacySql: false,
    });
    const [rows] = await job.getQueryResults();
    return rows;
  }
  toTimestamp(date: Date) {
    // e.g. DATETIME "2021-05-07 18:13:37"
    const formatted = date.toISOString().substr(0, 19).replace("T", " ");
    return 'DATETIME "' + formatted + '"';
  }
  addDateInterval(col: string, days: number) {
    return "DATETIME_ADD(" + col + ", INTERVAL " + days + " DAY)";
  }
  subtractHalfHour(col: string) {
    return "DATETIME_SUB(" + col + ", INTERVAL 30 MINUTE)";
  }
  regexMatch(col: string, regex: string) {
    return "REGEXP_CONTAINS(" + col + ', r"' + regex + '")';
  }
  percentile(col: string, percentile: number) {
    const offset = Math.floor(percentile * 100);
    return "APPROX_QUANTILES(" + col + ", 100)[OFFSET(" + offset + ")]";
  }
  dateTrunc(col: string) {
    return "date_trunc(" + col + ", DAY)";
  }
  dateDiff(startCol: string, endCol: string) {
    return "date_diff(" + endCol + ", " + startCol + ", DAY)";
  }
}

View File

@@ -0,0 +1,325 @@
import {
SourceIntegrationConstructor,
SourceIntegrationInterface,
ExperimentResults,
ImpactEstimationResult,
UsersQueryParams,
MetricValueParams,
UsersResult,
MetricValueResult,
VariationResult,
MetricValueResultDate,
PastExperimentResult,
} from "../types/Integration";
import { GoogleAnalyticsParams } from "../../types/integrations/googleanalytics";
import { decryptDataSourceParams } from "../services/datasource";
import { EventInterface } from "../models/TrackTableModel";
import { google } from "googleapis";
import {
GOOGLE_OAUTH_CLIENT_ID,
GOOGLE_OAUTH_CLIENT_SECRET,
APP_ORIGIN,
} from "../util/secrets";
import { DataSourceProperties } from "../../types/datasource";
import { ExperimentInterface, ExperimentPhase } from "../../types/experiment";
import { MetricInterface } from "../../types/metric";
// Build an OAuth2 client configured with this app's Google credentials
// and the redirect URI handled by the /oauth/google route.
export function getOauth2Client() {
  const redirectUri = `${APP_ORIGIN}/oauth/google`;
  return new google.auth.OAuth2(
    GOOGLE_OAUTH_CLIENT_ID,
    GOOGLE_OAUTH_CLIENT_SECRET,
    redirectUri
  );
}
// Google Analytics (Reporting API v4) data-source integration.
// Unlike the SQL integrations, a "query" here is a JSON report request
// that is sent to the GA reporting API over OAuth.
const GoogleAnalytics: SourceIntegrationConstructor = class
  implements SourceIntegrationInterface {
  params: GoogleAnalyticsParams;
  datasource: string;
  organization: string;
  constructor(encryptedParams: string) {
    // Params (view id, custom dimension, refresh token) are stored encrypted
    this.params = decryptDataSourceParams<GoogleAnalyticsParams>(
      encryptedParams
    );
  }
  getPastExperimentQuery(): string {
    throw new Error("Method not implemented.");
  }
  runPastExperimentQuery(): Promise<PastExperimentResult> {
    throw new Error("Method not implemented.");
  }
  // Build a GA report request counting users per day over the date range
  getUsersQuery(params: UsersQueryParams): string {
    // TODO: support segments and url regex
    return JSON.stringify(
      {
        viewId: this.params.viewId,
        dateRanges: [
          {
            startDate: params.from.toISOString().substr(0, 10),
            endDate: params.to.toISOString().substr(0, 10),
          },
        ],
        metrics: [
          {
            expression: "ga:users",
          },
        ],
        dimensions: [
          {
            name: "ga:date",
          },
        ],
      },
      null,
      2
    );
  }
  // Build a GA report request for the metric's expression plus daily users
  getMetricValueQuery(params: MetricValueParams): string {
    // TODO: support segments and url regex
    return JSON.stringify(
      {
        viewId: this.params.viewId,
        dateRanges: [
          {
            startDate: params.from.toISOString().substr(0, 10),
            endDate: params.to.toISOString().substr(0, 10),
          },
        ],
        metrics: [
          {
            // The metric's GA expression is stored in its "table" field
            expression: params.metric.table,
          },
          {
            expression: "ga:users",
          },
        ],
        dimensions: [
          {
            name: "ga:date",
          },
        ],
      },
      null,
      2
    );
  }
  // Run a users query and sum the per-day user counts into a total
  async runUsersQuery(query: string): Promise<UsersResult> {
    const { rows } = await this.runQuery(query);
    const dates: { date: string; users: number }[] = [];
    let totalUsers = 0;
    if (rows) {
      rows.forEach((row) => {
        // The ga:date dimension value gets a noon-UTC time suffix appended.
        // NOTE(review): GA dates appear to be concatenated as-is — confirm
        // downstream date parsing accepts this format
        const date = row.dimensions[0] + "T12:00:00Z";
        const users = parseFloat(row.metrics[0].values[0]);
        totalUsers += users;
        dates.push({
          date,
          users,
        });
      });
    }
    return {
      users: totalUsers,
      dates,
    };
  }
  // Convert a metric-value report into per-day count/mean/stddev rows.
  // The GA metric name determines how the raw value is interpreted.
  async runMetricValueQuery(query: string): Promise<MetricValueResult> {
    const { rows, metrics } = await this.runQuery(query);
    const dates: MetricValueResultDate[] = [];
    if (rows) {
      const metric = metrics[0];
      // "Total"-style metrics are everything except rates and ga:avg* metrics
      const isTotal =
        metric && metric !== "ga:bounceRate" && !metric.match(/^ga:avg/);
      const isBinomial =
        metric &&
        (metric === "ga:bounceRate" ||
          metric.match(/^ga:goal.*(Starts|Completions)$/));
      const isDuration =
        metric &&
        ["ga:avgPageLoadTime", "avgSessionDuration", "avgTimeOnPage"].includes(
          metric
        );
      rows.forEach((row) => {
        const date = row.dimensions[0] + "T12:00:00Z";
        const value = parseFloat(row.metrics[0].values[0]);
        const users = parseInt(row.metrics[1].values[0]);
        let count = 0;
        let mean = 0;
        let stddev = 0;
        if (metric === "ga:bounceRate") {
          // Bounce rate is a percentage: convert to a bounce count
          count = Math.round((users * value) / 100);
          mean = 1;
        } else if (isBinomial) {
          count = value;
          mean = 1;
        } else if (isDuration) {
          count = users;
          mean = value;
          // NOTE(review): stddev is set equal to the mean for durations —
          // presumably assuming an exponential distribution; confirm
          stddev = mean;
        } else if (isTotal) {
          count = users;
          mean = value / users;
        } else {
          count = users;
          mean = value;
        }
        dates.push({
          date,
          count,
          mean,
          stddev,
        });
      });
    }
    return {
      dates,
    };
  }
  // Send a single (JSON-encoded) report request to the GA Reporting API
  // and return its metric header names plus raw rows
  async runQuery(query: string) {
    const result = await google.analyticsreporting("v4").reports.batchGet({
      auth: this.getAuth(),
      requestBody: {
        reportRequests: [JSON.parse(query)],
      },
    });
    return {
      metrics: (
        result?.data?.reports[0]?.columnHeader?.metricHeader
          ?.metricHeaderEntries || []
      ).map((m) => m.name),
      rows: result?.data?.reports[0]?.data?.rows,
    };
  }
  getSourceProperties(): DataSourceProperties {
    return {
      includeInConfig: true,
      readonlyFields: [],
      type: "api",
      queryLanguage: "json",
      metricCaps: false,
    };
  }
  async getLatestEvents(): Promise<EventInterface[]> {
    throw new Error("Not implemented");
  }
  // "Testing" the connection only builds the OAuth client; no API call is made
  async testConnection(): Promise<boolean> {
    this.getAuth();
    return true;
  }
  getNonSensitiveParams(): Partial<GoogleAnalyticsParams> {
    // Exclude the OAuth refresh token
    return {
      customDimension: this.params.customDimension,
      viewId: this.params.viewId,
    };
  }
  // OAuth2 client authorized with this datasource's stored refresh token
  getAuth() {
    const client = getOauth2Client();
    client.setCredentials({
      // eslint-disable-next-line
      refresh_token: this.params.refreshToken
    });
    return client;
  }
  async getImpactEstimation(): Promise<ImpactEstimationResult> {
    throw new Error("Not implemented for GA");
  }
  // Fetch experiment results: one GA report with users + all metric
  // expressions, dimensioned by the experiment's custom dimension
  async getExperimentResults(
    experiment: ExperimentInterface,
    phase: ExperimentPhase,
    metrics: MetricInterface[]
  ): Promise<ExperimentResults> {
    const metricExpressions = metrics.map((m) => ({
      expression: m.table,
    }));
    const query = {
      viewId: this.params.viewId,
      dateRanges: [
        {
          startDate: phase.dateStarted.toISOString().substr(0, 10),
          endDate: (phase.dateEnded || new Date()).toISOString().substr(0, 10),
        },
      ],
      metrics: [
        {
          // values[0] is always users; metric expressions follow at j+1
          expression: "ga:users",
        },
        ...metricExpressions,
      ],
      dimensions: [
        {
          name: `ga:dimension${this.params.customDimension}`,
        },
      ],
    };
    const result = await google.analyticsreporting("v4").reports.batchGet({
      auth: this.getAuth(),
      requestBody: {
        reportRequests: [query],
      },
    });
    const rows: VariationResult[] = [];
    const raw = result?.data?.reports[0]?.data?.rows;
    if (!raw) {
      throw new Error("Failed to update");
    }
    raw.forEach((row, i) => {
      // Ignore any extra rows beyond the experiment's variations
      if (i >= experiment.variations.length) return;
      // NOTE(review): the GA dimension value is overwritten with the row
      // index and then parsed back out, so variation mapping relies purely
      // on row order — confirm this is intended
      row.dimensions[0] = `myexp:${i}`;
      const users = parseInt(row.metrics[0].values[0]);
      rows.push({
        variation: parseInt(row.dimensions[0].split(":", 2)[1]),
        users,
        metrics: metrics.map((metric, j) => {
          let value = parseFloat(row.metrics[0].values[j + 1]);
          if (metric.table === "ga:bounceRate") {
            // Percentage -> bounce count
            value = (users * value) / 100;
          } else if (metric.table.match(/^ga:avg/)) {
            // Per-user average -> total
            value = users * value;
          }
          const mean = Math.round(value) / users;
          // If the metric is duration, we can assume an exponential distribution and the stddev equals the mean
          const stddev = metric.type === "duration" ? mean : 0;
          return {
            metric: metric.id,
            count: users,
            mean,
            stddev,
          };
        }),
      });
    });
    return {
      results: [
        {
          dimension: "All",
          variations: rows,
        },
      ],
      query: JSON.stringify(query, null, 2),
    };
  }
};
export default GoogleAnalytics;

View File

@@ -0,0 +1,754 @@
import {
DataSourceProperties,
DataSourceSettings,
} from "../../types/datasource";
import { DimensionInterface } from "../../types/dimension";
import { ExperimentInterface, ExperimentPhase } from "../../types/experiment";
import { MixpanelConnectionParams } from "../../types/integrations/mixpanel";
import { MetricInterface } from "../../types/metric";
import { SegmentInterface } from "../../types/segment";
import { decryptDataSourceParams } from "../services/datasource";
import { formatQuery, runQuery } from "../services/mixpanel";
import {
DimensionResult,
ExperimentResults,
ImpactEstimationResult,
MetricValueParams,
MetricValueResult,
PastExperimentResult,
SourceIntegrationInterface,
UsersQueryParams,
UsersResult,
} from "../types/Integration";
// Percentile cut-points (1st through 99th) — presumably used for metric
// value distribution summaries in the Mixpanel integration below; confirm
const percentileNumbers = [
  0.01,
  0.05,
  0.1,
  0.2,
  0.3,
  0.4,
  0.5,
  0.6,
  0.7,
  0.8,
  0.9,
  0.95,
  0.99,
];
export default class Mixpanel implements SourceIntegrationInterface {
datasource: string;
params: MixpanelConnectionParams;
organization: string;
settings: DataSourceSettings;
constructor(encryptedParams: string, settings: DataSourceSettings) {
this.params = decryptDataSourceParams<MixpanelConnectionParams>(
encryptedParams
);
this.settings = {
default: {
userIdColumn: "unique_id",
},
experiments: {
experimentIdColumn: "Experiment name",
table: "$experiment_started",
variationColumn: "Variant name",
variationFormat: "index",
...settings.experiments,
},
pageviews: {
table: "Page view",
urlColumn: "$current_url",
...settings.pageviews,
},
};
}
async getExperimentResults(
experiment: ExperimentInterface,
phase: ExperimentPhase,
metrics: MetricInterface[],
activationMetric: MetricInterface,
dimension: DimensionInterface
): Promise<ExperimentResults> {
const hasEarlyStartMetrics = metrics.filter((m) => m.earlyStart).length > 0;
const onActivate = `
${activationMetric ? "state.activated = true;" : ""}
state.start = e.time;
${
hasEarlyStartMetrics
? ` // Process queued values
state.queuedEvents.forEach((q) => {
// Make sure event happened during the same session (within 30 minutes)
if(state.start - q.time > ${30 * 60 * 1000}) return;
${metrics
.filter((m) => m.earlyStart)
.map(
(metric, i) => `// Metric - ${metric.name}
if(${this.getValidMetricCondition(metric, "q")}) {
${this.getMetricAggregationCode(
metric,
this.getMetricValueCode(metric, "q"),
`state.m${i}`
)}
}`
)
.join("\n")}
});
state.queuedEvents = [];`
: ""
}`;
const query = formatQuery(`// Experiment results - ${experiment.name}
const metrics = ${JSON.stringify(
metrics.map(({ id, name }) => ({ id, name })),
null,
2
)};
return ${this.getEvents(phase.dateStarted, phase.dateEnded || new Date())}
.filter(function(e) {
if(${this.getValidExperimentCondition(
experiment.trackingKey
)}) return true;
${
activationMetric
? `if(${this.getValidMetricCondition(
activationMetric
)}) return true;`
: ""
}
${metrics
.map(
(metric) => `// Metric - ${metric.name}
if(${this.getValidMetricCondition(metric)}) return true;`
)
.join("\n")}
return false;
})
// Metric value per user
.groupByUser(function(state, events) {
state = state || {
inExperiment: false,
${dimension ? "dimension: null," : ""}
${activationMetric ? "activated: false," : ""}
start: null,
variation: null,
${metrics.map((m, i) => `m${i}: null,`).join("\n")} ${
hasEarlyStartMetrics ? "queuedEvents: []" : ""
}
};
for(var i=0; i<events.length; i++) {
const e = events[i];
// User is put into the experiment
if(!state.inExperiment && ${this.getValidExperimentCondition(
experiment.trackingKey
)}) {
state.inExperiment = true;
state.variation = ${this.getPropertyColumn(
this.settings.experiments.variationColumn || "Variant name",
"e"
)};
${
dimension
? `state.dimension = ${this.getPropertyColumn(
dimension.sql,
"e"
)} || null;`
: ""
}
${activationMetric ? "" : onActivate}
continue;
}
// Not in the experiment yet
if(!state.inExperiment) {
${hasEarlyStartMetrics ? "state.queuedEvents.push(e);" : ""}
continue;
}
${
activationMetric
? `
// Not activated yet
if(!state.activated) {
// Does this event activate it? (Metric - ${
activationMetric.name
})
if(${this.getValidMetricCondition(activationMetric)}) {
${onActivate}
}
else {
${hasEarlyStartMetrics ? "state.queuedEvents.push(e);" : ""}
continue;
}
}
`
: ""
}
${this.getConversionWindowCheck(
experiment.conversionWindowDays || 3,
"state.start"
)}
${metrics
.map(
(metric, i) => `// Metric - ${metric.name}
if(${this.getValidMetricCondition(metric)}) {
${this.getMetricAggregationCode(
metric,
this.getMetricValueCode(metric),
`state.m${i}`
)}
}
`
)
.join("")}
}
return state;
})
// Remove users that are not in the experiment
.filter(function(ev) {
if(!ev.value.inExperiment) return false;
if(ev.value.variation === null || ev.value.variation === undefined) return false;
${activationMetric ? "if(!ev.value.activated) return false;" : ""}
return true;
})
// One group per experiment variation${
dimension ? "/dimension" : ""
} with summary data
.groupBy(["value.variation"${dimension ? ', "value.dimension"' : ""}], [
// Total users in the group
mixpanel.reducer.count(),
${metrics
.map(
(metric, i) => `// Metric - ${metric.name}
mixpanel.reducer.numeric_summary('value.m${i}'),`
)
.join("\n")}
])
// Convert to an object that's easier to work with
.map(row => {
const ret = {
variation: row.key[0],
dimension: ${dimension ? "row.key[1] || ''" : "''"},
users: row.value[0],
metrics: [],
};
for(let i=1; i<row.value.length; i++) {
ret.metrics.push({
id: metrics[i-1].id,
name: metrics[i-1].name,
count: row.value[i].count,
mean: row.value[i].avg,
stddev: row.value[i].stddev,
});
}
return ret;
});
`);
const result = await runQuery<
{
variation: string;
dimension: string;
users: number;
metrics: {
id: string;
name: string;
count: number;
mean: number;
stddev: number;
}[];
}[]
>(this.params, query);
const variationKeyMap = new Map<string, number>();
experiment.variations.forEach((v, i) => {
variationKeyMap.set(v.key, i);
});
const dimensions: { [key: string]: DimensionResult } = {};
result.forEach((row) => {
dimensions[row.dimension] = dimensions[row.dimension] || {
dimension: row.dimension,
variations: [],
};
dimensions[row.dimension].variations.push({
variation:
this.settings.experiments.variationFormat === "key"
? variationKeyMap.get(row.variation)
: parseInt(row.variation),
users: row.users || 0,
metrics: row.metrics.map((m) => ({
metric: m.id,
count: m.count,
mean: m.mean,
stddev: m.stddev,
})),
});
});
return {
query,
results: Object.values(dimensions),
};
}
// Sanity-check the Mixpanel credentials/connection by counting today's
// events. The count itself is discarded — any auth or network problem
// surfaces as an error thrown by runQuery.
async testConnection(): Promise<boolean> {
  const day = new Date().toISOString().substr(0, 10);
  const query = formatQuery(`
return Events({
from_date: "${day}",
to_date: "${day}"
})
.reduce(mixpanel.reducer.count());
`);
  await runQuery(this.params, query);
  return true;
}
// Static capabilities of the Mixpanel integration: queries are written in
// JavaScript (JQL), metric caps are supported, and the connection can be
// configured via the config file.
getSourceProperties(): DataSourceProperties {
  const properties: DataSourceProperties = {
    includeInConfig: true,
    readonlyFields: [],
    type: "api",
    queryLanguage: "javascript",
    metricCaps: true,
  };
  return properties;
}
// Estimate the potential impact of running an experiment on pages matching
// urlRegex (optionally limited to a segment): computes daily traffic, the
// site-wide metric value, and the metric value on the targeted pages over a
// 30-day window. Returns zeros if any of the three queries yields no data.
async getImpactEstimation(
urlRegex: string,
metric: MetricInterface,
segment?: SegmentInterface
): Promise<ImpactEstimationResult> {
const numDays = 30;
// Ignore last 3 days of data since we need to give people time to convert
const end = new Date();
end.setDate(end.getDate() - 3);
const start = new Date();
start.setDate(start.getDate() - numDays - 3);
// Shared settings for all three queries below
const baseSettings = {
from: start,
to: end,
includeByDate: false,
userIdType: metric.userIdType,
conversionWindow: 3,
};
// 1) How many users hit the targeted pages (within the segment)?
const usersQuery = this.getUsersQuery({
...baseSettings,
name: "Traffic - Selected Pages and Segment",
urlRegex,
segmentQuery: segment?.sql || null,
segmentName: segment?.name,
});
// 2) Metric value across the whole site (denominator for impact)
const metricQuery = this.getMetricValueQuery({
...baseSettings,
name: "Metric Value - Entire Site",
metric,
includePercentiles: false,
});
// 3) Metric value restricted to the targeted pages/segment
const valueQuery = this.getMetricValueQuery({
...baseSettings,
name: "Metric Value - Selected Pages and Segment",
metric,
includePercentiles: false,
urlRegex,
segmentQuery: segment?.sql || null,
segmentName: segment?.name,
});
// Run all three queries concurrently
const [users, metricTotal, value] = await Promise.all([
this.runUsersQuery(usersQuery),
this.runMetricValueQuery(metricQuery),
this.runMetricValueQuery(valueQuery),
]);
// Concatenate the formatted queries for display/debugging in the UI
const formatted =
[usersQuery, metricQuery, valueQuery]
.map((code) => formatQuery(code))
.join("\n\n\n") + ";";
if (users && metricTotal && value) {
// Normalize all numbers to per-day averages; `|| 0` guards NaN
return {
query: formatted,
users: users.users / numDays || 0,
value: (value.count * value.mean) / numDays || 0,
metricTotal: (metricTotal.count * metricTotal.mean) / numDays || 0,
};
}
return {
query: formatted,
users: 0,
value: 0,
metricTotal: 0,
};
}
// Build a Mixpanel JQL query that counts distinct users with a valid page
// view (optionally restricted to a segment), overall and — when
// params.includeByDate is set — bucketed by day of first page view.
// The returned string is JavaScript to be executed by Mixpanel's JQL API.
getUsersQuery(params: UsersQueryParams): string {
return formatQuery(`
// ${params.name} - Number of Users
return ${this.getEvents(params.from, params.to)}
.filter(function(event) {
${
params.segmentQuery
? `// Limit to Segment - ${params.segmentName}
if(!(${params.segmentQuery})) return false;`
: ""
}
// Valid page view
if(${this.getValidPageCondition(params.urlRegex)}) return true;
return false;
})
// One event per user
.groupByUser(mixpanel.reducer.min("time"))
.reduce([
// Overall count of users
mixpanel.reducer.count()${
params.includeByDate
? `,
// Count of users per day
(prevs, events) => {
const dates = {};
prevs.forEach(prev => {
prev.dates.forEach(d=>dates[d.date] = (dates[d.date] || 0) + d.users)
});
events.forEach(e=>{
const date = (new Date(e.value)).toISOString().substr(0,10);
dates[date] = (dates[date] || 0) + 1;
});
return {
type: "byDate",
dates: Object.keys(dates).map(d => ({
date: d,
users: dates[d]
}))
};
}`
: ""
}
])
// Transform into easy-to-use objects
.map(vals => vals.map(val => !val.type ? {type:"overall",users:val} : val))
`);
}
// Build a Mixpanel JQL query that computes per-user metric values for users
// who viewed a matching page, then reduces to a numeric summary — optionally
// by date and with percentile breakdowns. Metric events that occur before
// the first page view are queued and back-filled only when metric.earlyStart
// is set; events outside the conversion window are discarded.
getMetricValueQuery(params: MetricValueParams): string {
const metric = params.metric;
// The returned string is JavaScript executed by Mixpanel's JQL API.
return formatQuery(`
// ${params.name} - Metric value (${metric.name})
return ${this.getEvents(params.from, params.to)}
.filter(function(event) {
${
params.segmentQuery
? `// Limit to Segment - ${params.segmentName}
if(!(${params.segmentQuery})) return false;`
: ""
}
// Valid page view
if(${this.getValidPageCondition(params.urlRegex)}) return true;
if(${this.getValidMetricCondition(metric, "event")}) return true;
return false;
})
// Metric value per user
.groupByUser(function(state, events) {
state = state || {firstPageView: false, metricValue: null, queuedValues: []};
for(var i=0; i<events.length; i++) {
if(!state.firstPageView && ${this.getValidPageCondition(
params.urlRegex,
"events[i]"
)}) {
state.firstPageView = events[i].time;
// Process queued values
state.queuedValues.forEach((q) => {
${this.getConversionWindowCheck(
params.conversionWindow,
"state.firstPageView",
"q.time",
"return"
)}
${this.getMetricAggregationCode(metric, "q.value")}
});
state.queuedValues = [];
${metric.earlyStart ? "" : "continue;"}
}
if(${this.getValidMetricCondition(metric, "events[i]")}) {
if(!state.firstPageView) {
${
metric.earlyStart
? `state.queuedValues.push({value: ${this.getMetricValueCode(
metric
)}, time: events[i].time});`
: ""
}
continue;
}
${this.getConversionWindowCheck(
params.conversionWindow,
"state.firstPageView"
)}
${this.getMetricAggregationCode(
metric,
this.getMetricValueCode(metric)
)}
}
}
return state;
})
// Remove users that did not convert
.filter(function(ev) {
return ev.value.firstPageView && ev.value.metricValue !== null;
})
.reduce([
// Overall summary metrics
mixpanel.reducer.numeric_summary('value.metricValue')${
params.includeByDate
? `,
// Summary metrics by date
(prevs, events) => {
const dates = {};
prevs.forEach(prev => {
prev.dates.forEach(d=>{
dates[d.date] = dates[d.date] || {count:0, sum:0};
dates[d.date].count += d.count;
dates[d.date].sum += d.sum;
})
});
events.forEach(e=>{
const date = (new Date(e.value.firstPageView)).toISOString().substr(0,10);
dates[date] = dates[date] || {count:0, sum:0};
dates[date].count++;
dates[date].sum += e.value.metricValue;
});
return {
type: "byDate",
dates: Object.keys(dates).map(d => ({
date: d,
...dates[d]
}))
};
}`
: ""
}${
params.includePercentiles && metric.type !== "binomial"
? `,
// Percentile breakdown
mixpanel.reducer.numeric_percentiles(
"value.metricValue",
${JSON.stringify(percentileNumbers.map((n) => n * 100))}
)`
: ""
}
])
// Transform into easy-to-use objects
.map(vals => vals.map(val => {
if(val[0] && val[0].percentile) return {type: "percentile",percentiles:val};
if(val.count) return {type: "overall", ...val};
return val;
}));
`);
}
async runUsersQuery(query: string): Promise<UsersResult> {
const rows = await runQuery<
[
(
| {
type: "byDate";
dates: {
date: string;
users: number;
}[];
}
| {
type: "overall";
users: number;
}
)[]
]
>(this.params, query);
const result: UsersResult = { users: 0 };
rows &&
rows[0] &&
rows[0].forEach((row) => {
if (row.type === "overall") {
result.users = row.users;
} else if (row.type === "byDate") {
row.dates.sort((a, b) => a.date.localeCompare(b.date));
result.dates = row.dates;
}
});
return result;
}
// Execute a metric value query (from getMetricValueQuery) and normalize the
// heterogeneous result rows (overall summary / by-date / percentiles) into
// a single MetricValueResult object.
async runMetricValueQuery(query: string): Promise<MetricValueResult> {
const rows = await runQuery<
[
(
| {
type: "byDate";
dates: {
date: string;
count: number;
sum: number;
}[];
}
| {
type: "overall";
count: number;
sum: number;
avg: number;
stddev: number;
}
| {
type: "percentile";
percentiles: {
percentile: number;
value: number;
}[];
}
)[]
]
>(this.params, query);
const result: MetricValueResult = {};
// Guard against empty/missing responses before iterating
rows &&
rows[0] &&
rows[0].forEach((row) => {
if (row.type === "overall") {
result.count = row.count;
result.mean = row.avg;
result.stddev = row.stddev;
} else if (row.type === "byDate") {
result.dates = [];
// Sort chronologically (ISO dates sort lexicographically)
row.dates.sort((a, b) => a.date.localeCompare(b.date));
row.dates.forEach(({ date, count, sum }) => {
result.dates.push({
date,
count,
// Per-day mean; avoid division by zero for empty days
mean: count > 0 ? sum / count : 0,
});
});
} else if (row.type === "percentile") {
// Keyed by percentile number as a string, e.g. "95"
result.percentiles = {};
row.percentiles.forEach(({ percentile, value }) => {
result.percentiles[percentile + ""] = value;
});
}
});
return result;
}
// Past-experiment discovery is not supported for the Mixpanel integration.
getPastExperimentQuery(from: Date): string {
// Reference the parameter to satisfy the unused-variable lint rule
console.log(from);
throw new Error("Method not implemented.");
}
// Past-experiment discovery is not supported for the Mixpanel integration.
async runPastExperimentQuery(query: string): Promise<PastExperimentResult> {
// Reference the parameter to satisfy the unused-variable lint rule
console.log(query);
throw new Error("Method not implemented.");
}
getNonSensitiveParams(): Partial<MixpanelConnectionParams> {
return {
...this.params,
secret: undefined,
};
}
// JS expression for a single event's contribution to a metric:
// the configured property value (defaulting to 0 when missing), or the
// constant 1 for column-less (count/binomial) metrics.
private getMetricValueCode(
  metric: MetricInterface,
  eventVar: string = "events[i]"
) {
  if (!metric.column) {
    return "1";
  }
  return `${this.getPropertyColumn(metric.column, eventVar)}||0`;
}
// JS statement that accumulates `value` into `destVar`, optionally capped.
// Binomial metrics are always capped at 1 (converted or not); other metric
// types use the metric's optional cap.
private getMetricAggregationCode(
  metric: MetricInterface,
  value: string,
  destVar: string = "state.metricValue"
) {
  const cap = metric.type === "binomial" ? 1 : metric.cap;
  const open = cap ? `Math.min(${cap},` : "";
  const close = cap ? ")" : "";
  return `${destVar} = ${open}(${destVar} || 0) + ${value}${close};`;
}
// JS snippet that bails out (via `onFail`) when an event falls outside the
// conversion window that started at `startVar`. Event timestamps are in
// milliseconds, so the window is converted from days to ms.
private getConversionWindowCheck(
  conversionWindow: number,
  startVar: string,
  eventTimeVar: string = "events[i].time",
  onFail: string = "continue;"
) {
  const windowMs = conversionWindow * 24 * 60 * 60 * 1000;
  return `// Check conversion window (${conversionWindow} days)
if(${eventTimeVar} - ${startVar} > ${windowMs}) {
${onFail}
}`;
}
// JQL Events() call for the given date range. Mixpanel expects
// YYYY-MM-DD date strings.
private getEvents(from: Date, to: Date) {
  const day = (d: Date) => d.toISOString().substr(0, 10);
  return `Events({from_date: "${day(from)}", to_date: "${day(to)}"})`;
}
// JS condition matching a page-view event, optionally restricted to URLs
// matching urlRegex. ".*" matches everything, so the regex test is skipped
// entirely in that case.
private getValidPageCondition(urlRegex?: string, event: string = "event") {
  const pageEvent = this.settings.pageviews.table || "Page view";
  if (!urlRegex || urlRegex === ".*") {
    return `${event}.name === "${pageEvent}"`;
  }
  const urlCol = this.settings.pageviews.urlColumn;
  // NOTE(review): urlRegex is interpolated into a JS regex literal; assumes
  // it was validated upstream (no quotes/slashes) — confirm at call sites.
  return `${event}.name === "${pageEvent}" && ${event}.properties["${urlCol}"] && ${event}.properties["${urlCol}"].match(/${urlRegex}/)`;
}
// Convert a dot-separated property path (e.g. `foo.bar`) into a JS property
// access expression (e.g. `e.properties["foo"]["bar"]`). Segments that
// already use bracket syntax (start with "[") are passed through untouched.
private getPropertyColumn(col: string, event: string = "e") {
  const colAccess = col
    .split(".")
    .map((part) => {
      if (part.substr(0, 1) !== "[") return `["${part}"]`;
      return part;
    })
    // Bug fix: the array must be joined with no separator. Interpolating the
    // array directly used Array#toString, inserting commas between segments
    // ("e.properties[\"foo\"],[\"bar\"]"), which is a JS comma expression —
    // not a property access — for any multi-part column.
    .join("");
  return `${event}.properties${colAccess}`;
}
// JS condition matching an event for the given metric: the event name must
// equal the metric's table, plus one clause per configured metric condition
// (regex match "~", negated regex "!~", or a plain comparison operator).
private getValidMetricCondition(
metric: MetricInterface,
event: string = "e"
) {
const checks: string[] = [];
// Right event name
checks.push(`${event}.name === "${metric.table}"`);
if (metric.conditions) {
metric.conditions.forEach((cond) => {
// Regex operators render as `.match(/value/)`; everything else as a
// binary comparison with a JSON-quoted value
const check = ["~", "!~"].includes(cond.operator)
? `.match(/${cond.value}/)`
: ` ${cond.operator} ${JSON.stringify(cond.value)}`;
// "!~" negates the whole match expression
// NOTE(review): if the property is absent on an event, `.match` would
// throw at query time — presumably acceptable/filtered upstream; verify.
checks.push(
`${cond.operator === "!~" ? "!" : ""}${this.getPropertyColumn(
cond.column,
event
)}${check}`
);
});
}
return checks.join(" && ");
}
// JS condition matching an experiment-exposure event for a specific
// experiment id (tracking key).
private getValidExperimentCondition(id: string, event: string = "e") {
  // Default Mixpanel exposure event name when none is configured
  const eventName = this.settings.experiments.table || "$experiment_started";
  const idColumn = this.getPropertyColumn(
    this.settings.experiments.experimentIdColumn || "Experiment name",
    event
  );
  return [
    `${event}.name === "${eventName}"`,
    `${idColumn} === "${id}"`,
  ].join(" && ");
}
}

View File

@@ -0,0 +1,34 @@
import { PostgresConnectionParams } from "../../types/integrations/postgres";
import { decryptDataSourceParams } from "../services/datasource";
import { runPostgresQuery } from "../services/postgres";
import SqlIntegration from "./SqlIntegration";
// PostgreSQL data source integration. Inherits all query generation from
// SqlIntegration and only supplies connection handling plus the
// Postgres-specific SQL dialect pieces.
export default class Postgres extends SqlIntegration {
  params: PostgresConnectionParams;
  // Decrypt and store the connection settings.
  setParams(encryptedParams: string) {
    this.params = decryptDataSourceParams<PostgresConnectionParams>(
      encryptedParams
    );
  }
  // Connection params with the password blanked out, safe for the client.
  getNonSensitiveParams(): Partial<PostgresConnectionParams> {
    const safe: Partial<PostgresConnectionParams> = { ...this.params };
    safe.password = undefined;
    return safe;
  }
  runQuery(sql: string) {
    return runPostgresQuery(this.params, sql);
  }
  // Prefix unqualified table names with the configured default schema.
  getFullTableName(table: string): string {
    if (!this.params.defaultSchema || table.includes(".")) {
      return table;
    }
    return `${this.params.defaultSchema}.${table}`;
  }
  percentile(col: string, percentile: number) {
    return `PERCENTILE_DISC ( ${percentile} ) WITHIN GROUP (ORDER BY ${col})`;
  }
  // Whole-day difference via ::DATE casts.
  dateDiff(startCol: string, endCol: string) {
    return `${endCol}::DATE - ${startCol}::DATE`;
  }
}

View File

@@ -0,0 +1,31 @@
import { PostgresConnectionParams } from "../../types/integrations/postgres";
import { decryptDataSourceParams } from "../services/datasource";
import { runPostgresQuery } from "../services/postgres";
import SqlIntegration from "./SqlIntegration";
// Amazon Redshift data source integration. Redshift speaks the Postgres
// wire protocol, so it reuses the Postgres connection params and query
// runner; only the percentile SQL differs (approximate variant).
export default class Redshift extends SqlIntegration {
  params: PostgresConnectionParams;
  // Decrypt and store the connection settings.
  setParams(encryptedParams: string) {
    this.params = decryptDataSourceParams<PostgresConnectionParams>(
      encryptedParams
    );
  }
  // Connection params with the password blanked out, safe for the client.
  getNonSensitiveParams(): Partial<PostgresConnectionParams> {
    const safe: Partial<PostgresConnectionParams> = { ...this.params };
    safe.password = undefined;
    return safe;
  }
  runQuery(sql: string) {
    return runPostgresQuery(this.params, sql);
  }
  // Prefix unqualified table names with the configured default schema.
  getFullTableName(table: string): string {
    if (!this.params.defaultSchema || table.includes(".")) {
      return table;
    }
    return `${this.params.defaultSchema}.${table}`;
  }
  percentile(col: string, percentile: number) {
    return `APPROXIMATE PERCENTILE_DISC ( ${percentile} ) WITHIN GROUP (ORDER BY ${col})`;
  }
}

View File

@@ -0,0 +1,43 @@
import { SnowflakeConnectionParams } from "../../types/integrations/snowflake";
import { decryptDataSourceParams } from "../services/datasource";
import { runSnowflakeQuery } from "../services/snowflake";
import SqlIntegration from "./SqlIntegration";
// Snowflake data source integration: connection handling plus the
// Snowflake-specific SQL dialect pieces (approximate percentiles, RLIKE
// regex matching).
export default class Snowflake extends SqlIntegration {
params: SnowflakeConnectionParams;
// Decrypt and store the connection settings.
setParams(encryptedParams: string) {
this.params = decryptDataSourceParams<SnowflakeConnectionParams>(
encryptedParams
);
}
// Connection params with the password blanked out, safe for the client.
getNonSensitiveParams(): Partial<SnowflakeConnectionParams> {
return {
...this.params,
password: undefined,
};
}
runQuery(sql: string) {
return runSnowflakeQuery(this.params, sql);
}
percentile(col: string, percentile: number) {
return `APPROX_PERCENTILE(${col}, ${percentile})`;
}
// Rewrite the regex so Snowflake's RLIKE (which implicitly anchors the
// pattern at both ends) behaves like an unanchored partial match.
regexMatch(col: string, regex: string) {
// Snowflake automatically adds `$` to the end of the regex
// If specified, remove it. Otherwise, inject .* before the end to match intended behavior
if (regex.substr(-1) === "$") {
regex = regex.substr(0, regex.length - 1);
} else {
regex += ".*";
}
// Same with '^' at the beginning
if (regex.substr(0, 1) === "^") {
regex = regex.substr(1);
} else {
regex = ".*" + regex;
}
return `rlike(${col}, '${regex}')`;
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,16 @@
import mongoose from "mongoose";
import { ApiKeyInterface } from "../../types/apikey";
// API keys authenticate server-to-server requests on behalf of an
// organization.
const apiKeySchema = new mongoose.Schema({
// the secret token value itself
key: String,
description: String,
// id of the owning organization
organization: String,
dateCreated: Date,
});
export type ApiKeyDocument = mongoose.Document & ApiKeyInterface;
export const ApiKeyModel = mongoose.model<ApiKeyDocument>(
"ApiKey",
apiKeySchema
);

View File

@@ -0,0 +1,30 @@
import mongoose from "mongoose";
import { AuditInterface } from "../../types/audit";
// Audit log entries: who (user) did what (event) to which object (entity),
// with optional parent object and serialized details.
const auditSchema = new mongoose.Schema({
id: String,
organization: String,
// Snapshot of the acting user at the time of the event
user: {
_id: false,
id: String,
email: String,
name: String,
},
event: String,
// The object the event applies to
entity: {
_id: false,
object: String,
id: String,
},
// Optional containing object (e.g. the experiment a comment belongs to)
parent: {
_id: false,
object: String,
id: String,
},
// Serialized (e.g. JSON) payload with event specifics
details: String,
dateCreated: Date,
});
export type AuditDocument = mongoose.Document & AuditInterface;
export const AuditModel = mongoose.model<AuditDocument>("Audit", auditSchema);

View File

@@ -0,0 +1,93 @@
import { Request, Response } from "express";
import mongoose from "mongoose";
import crypto from "crypto";
// Server-side record backing a refresh-token cookie.
export interface AuthRefreshInterface {
token: string;
userId: string;
userAgent: string;
ip: string;
createdAt: Date;
lastLogin: Date;
}
const authRefreshSchema = new mongoose.Schema({
token: {
type: String,
unique: true,
},
userId: String,
// Captured for auditing/session display
userAgent: String,
ip: String,
createdAt: {
type: Date,
// Refresh is valid for 30 days
// (Mongo TTL index: documents auto-expire this many seconds after createdAt)
expires: 30 * 24 * 60 * 60,
},
lastLogin: Date,
});
export type AuthRefreshDocument = mongoose.Document & AuthRefreshInterface;
export const AuthRefreshModel = mongoose.model<AuthRefreshDocument>(
"AuthRefresh",
authRefreshSchema
);
export async function createRefreshToken(
req: Request,
res: Response,
userId: string
) {
const token = crypto.randomBytes(32).toString("base64");
const authRefreshDoc: AuthRefreshInterface = {
createdAt: new Date(),
lastLogin: new Date(),
userId,
ip: req.ip,
userAgent: req.headers["user-agent"] || "",
token,
};
await AuthRefreshModel.create(authRefreshDoc);
res.cookie("AUTH_REFRESH_TOKEN", token, {
httpOnly: true,
maxAge: 30 * 24 * 60 * 60 * 1000,
secure: req.secure,
});
}
// Revoke the caller's refresh token: delete the server-side record (if one
// exists) and clear the cookie either way.
export async function deleteRefreshToken(req: Request, res: Response) {
  const token = req.cookies["AUTH_REFRESH_TOKEN"];
  if (token) {
    await AuthRefreshModel.deleteOne({ token });
  }
  res.clearCookie("AUTH_REFRESH_TOKEN");
}
// Resolve a refresh token to its user id, updating lastLogin on a hit.
// Returns "" when the token is unknown (or already expired via TTL).
export async function getUserIdFromAuthRefreshToken(
  token: string
): Promise<string> {
  const doc = await AuthRefreshModel.findOne({ token });
  if (!doc) {
    return "";
  }
  await AuthRefreshModel.updateOne(
    { _id: doc._id },
    {
      $set: {
        lastLogin: new Date(),
      },
    }
  );
  return doc.userId || "";
}

View File

@@ -0,0 +1,50 @@
import mongoose from "mongoose";
import { DataSourceInterface } from "../../types/datasource";
// A configured analytics data source (SQL warehouse, Mixpanel, etc.):
// encrypted connection params plus per-table column mappings.
const dataSourceSchema = new mongoose.Schema({
id: String,
name: String,
organization: String,
dateCreated: Date,
dateUpdated: Date,
// Wrapped in an object because `type` is a reserved key in mongoose schemas
type: { type: String },
// Encrypted connection parameters (see decryptDataSourceParams)
params: String,
// Column mappings describing where experiment/user/pageview data lives
settings: {
default: {
timestampColumn: String,
userIdColumn: String,
anonymousIdColumn: String,
},
experiments: {
table: String,
timestampColumn: String,
userIdColumn: String,
anonymousIdColumn: String,
experimentIdColumn: String,
variationColumn: String,
variationFormat: String,
},
users: {
table: String,
userIdColumn: String,
},
identifies: {
table: String,
userIdColumn: String,
anonymousIdColumn: String,
},
pageviews: {
table: String,
urlColumn: String,
timestampColumn: String,
userIdColumn: String,
anonymousIdColumn: String,
},
},
});
export type DataSourceDocument = mongoose.Document & DataSourceInterface;
export const DataSourceModel = mongoose.model<DataSourceDocument>(
"DataSource",
dataSourceSchema
);

View File

@@ -0,0 +1,17 @@
import mongoose from "mongoose";
import { DimensionInterface } from "../../types/dimension";
// A user dimension used to slice experiment results, defined by a SQL (or
// query-language) snippet against a specific data source.
const dimensionSchema = new mongoose.Schema({
id: String,
organization: String,
datasource: String,
name: String,
// Query snippet that produces the dimension value
sql: String,
dateCreated: Date,
dateUpdated: Date,
});
export type DimensionDocument = mongoose.Document & DimensionInterface;
export const DimensionModel = mongoose.model<DimensionDocument>(
"Dimension",
dimensionSchema
);

View File

@@ -0,0 +1,28 @@
import mongoose from "mongoose";
import { DiscussionInterface } from "../../types/discussion";
// A comment thread attached to another object (identified by
// parentType/parentId), with denormalized author info per comment.
const discussionSchema = new mongoose.Schema({
id: String,
organization: String,
parentType: String,
parentId: String,
comments: [
{
_id: false,
date: Date,
userId: String,
// Author email/name denormalized at post time
userEmail: String,
userName: String,
content: String,
// True once a comment has been edited after posting
edited: Boolean,
},
],
dateUpdated: Date,
});
export type DiscussionDocument = mongoose.Document & DiscussionInterface;
export const DiscussionModel = mongoose.model<DiscussionDocument>(
"Discussion",
discussionSchema
);

View File

@@ -0,0 +1,87 @@
import mongoose from "mongoose";
import { ExperimentInterface } from "../../types/experiment";
export type ExperimentDocument = mongoose.Document & ExperimentInterface;
// Core experiment definition: metadata, metrics, variations (including
// visual-editor changes), and the history of phases it has run through.
const experimentSchema = new mongoose.Schema({
id: String,
// Key the event-tracking pipeline uses to identify this experiment
trackingKey: String,
organization: String,
owner: String,
datasource: String,
userIdType: String,
name: String,
dateCreated: Date,
dateUpdated: Date,
tags: [String],
description: String,
// Observations is not used anymore, keeping here so it will continue being saved in Mongo if present
observations: String,
hypothesis: String,
conversionWindowDays: Number,
// Metric ids tracked for this experiment
metrics: [String],
activationMetric: String,
sqlOverride: {
type: Map,
of: String,
},
archived: Boolean,
status: String,
results: String,
analysis: String,
// Index of the winning variation, if declared
winner: Number,
currentPhase: Number,
autoAssign: Boolean,
implementation: String,
previewURL: String,
targetURLRegex: String,
variations: [
{
_id: false,
name: String,
description: String,
key: String,
screenshots: [
{
_id: false,
path: String,
width: Number,
height: Number,
description: String,
},
],
css: String,
// Visual-editor DOM mutations applied for this variation
dom: [
{
_id: false,
selector: String,
action: String,
attribute: String,
value: String,
},
],
},
],
// Chronological phases (e.g. ramp-up, main) with traffic settings
phases: [
{
_id: false,
dateStarted: Date,
dateEnded: Date,
phase: String,
reason: String,
coverage: Number,
variationWeights: [Number],
targeting: String,
},
],
data: String,
targeting: String,
segment: String,
lastSnapshotAttempt: Date,
autoSnapshots: Boolean,
});
export const ExperimentModel = mongoose.model<ExperimentDocument>(
"Experiment",
experimentSchema
);

View File

@@ -0,0 +1,54 @@
import mongoose from "mongoose";
import { ExperimentSnapshotInterface } from "../../types/experiment-snapshot";
// A point-in-time analysis of an experiment phase: per-dimension,
// per-variation users and per-metric statistics.
const experimentSnapshotSchema = new mongoose.Schema({
id: String,
experiment: String,
// Index of the experiment phase this snapshot analyzes
phase: Number,
// Wrapped in an object because `type` is a reserved key in mongoose schemas
type: { type: String },
dateCreated: Date,
// True when results were entered by hand rather than queried
manual: Boolean,
query: String,
queryLanguage: String,
dimension: String,
// One entry per dimension value; each contains variation-level stats
results: [
{
_id: false,
name: String,
// Sample-ratio-mismatch p-value for this dimension slice
srm: Number,
variations: [
{
_id: false,
users: Number,
// Map keyed by metric id
metrics: {
type: Map,
of: {
_id: false,
value: Number,
cr: Number,
users: Number,
// Credible/confidence interval bounds
ci: [Number],
expected: Number,
// Points for plotting the posterior distribution
buckets: [
{
_id: false,
x: Number,
y: Number,
},
],
chanceToWin: Number,
},
},
},
],
},
],
});
export type ExperimentSnapshotDocument = mongoose.Document &
ExperimentSnapshotInterface;
export const ExperimentSnapshotModel = mongoose.model<ExperimentSnapshotDocument>(
"ExperimentSnapshot",
experimentSnapshotSchema
);

View File

@@ -0,0 +1,77 @@
import mongoose from "mongoose";
import crypto from "crypto";
import { getUserByEmail } from "../services/users";
import { APP_ORIGIN } from "../util/secrets";
import { sendResetPasswordEmail } from "../services/email";
// Server-side record backing a one-time password-reset link.
export interface ForgotPasswordInterface {
token: string;
userId: string;
createdAt: Date;
}
const forgotPasswordSchema = new mongoose.Schema({
token: {
type: String,
unique: true,
},
userId: String,
createdAt: {
type: Date,
// Link is valid for 30 minutes
// (Mongo TTL index: documents auto-expire this many seconds after createdAt)
expires: 30 * 60,
},
});
export type ForgotPasswordDocument = mongoose.Document &
ForgotPasswordInterface;
export const ForgotPasswordModel = mongoose.model<ForgotPasswordDocument>(
"ForgotPassword",
forgotPasswordSchema
);
// Create a password-reset token for the user with the given email and send
// them the reset link. Throws when the email is unknown or sending fails.
export async function createForgotPasswordToken(email: string): Promise<void> {
  const user = await getUserByEmail(email);
  if (!user) {
    throw new Error("Could not find a user with that email address");
  }
  const token = crypto.randomBytes(32).toString("hex");
  const record: ForgotPasswordInterface = {
    token,
    userId: user.id,
    createdAt: new Date(),
  };
  await ForgotPasswordModel.create(record);
  const resetUrl = `${APP_ORIGIN}/reset-password?token=${token}`;
  try {
    await sendResetPasswordEmail(email, resetUrl);
  } catch (e) {
    // Log the link so an admin can recover it when email delivery is down
    console.error(
      "Failed to send reset password email. The reset password link for " +
        email +
        " is: " +
        resetUrl
    );
    throw e;
  }
}
// Resolve a password-reset token to its user id; "" when the token is
// unknown or has expired (TTL).
export async function getUserIdFromForgotPasswordToken(
  token: string
): Promise<string> {
  const doc = await ForgotPasswordModel.findOne({ token });
  return doc ? doc.userId || "" : "";
}
// Invalidate a password-reset token (called after a successful reset).
export async function deleteForgotPasswordToken(token: string) {
  return ForgotPasswordModel.deleteOne({ token });
}

View File

@@ -0,0 +1,35 @@
import mongoose from "mongoose";
import { IdeaInterface } from "../../types/idea";
// An experiment idea: free-form text plus votes and impact-scoring inputs.
const ideaSchema = new mongoose.Schema({
id: String,
text: String,
archived: Boolean,
details: String,
userId: String,
// Author name denormalized at creation time
userName: String,
source: String,
organization: String,
tags: [String],
// Up/down votes; dir is the vote direction
votes: [
{
_id: false,
userId: String,
dir: Number,
dateCreated: Date,
},
],
dateCreated: Date,
dateUpdated: Date,
// Computed impact score and its inputs
impactScore: Number,
experimentLength: Number,
estimateParams: {
// References an ImpactEstimate document id
estimate: String,
improvement: Number,
numVariations: Number,
userAdjustment: Number,
},
});
export type IdeaDocument = mongoose.Document & IdeaInterface;
export const IdeaModel = mongoose.model<IdeaDocument>("Idea", ideaSchema);

View File

@@ -0,0 +1,139 @@
import mongoose from "mongoose";
import { ImpactEstimateInterface } from "../../types/impact-estimate";
import uniqid from "uniqid";
import { getMetricById } from "../services/experiments";
import {
getDataSourceById,
getSourceIntegrationObject,
} from "../services/datasource";
import { QueryLanguage } from "../../types/datasource";
import { SegmentInterface } from "../../types/segment";
import { SegmentModel } from "./SegmentModel";
// Cached result of an impact-estimation run for a metric on pages matching
// a regex (optionally within a segment).
const impactEstimateSchema = new mongoose.Schema({
id: String,
organization: String,
metric: String,
// Page URL regex the estimate was computed for
regex: String,
segment: String,
// Site-wide metric total, targeted-page users, and targeted-page value
metricTotal: Number,
users: Number,
value: Number,
// The query that produced the estimate, for display/debugging
query: String,
queryLanguage: String,
dateCreated: Date,
});
export type ImpactEstimateDocument = mongoose.Document &
ImpactEstimateInterface;
export const ImpactEstimateModel = mongoose.model<ImpactEstimateDocument>(
"ImpactEstimate",
impactEstimateSchema
);
// Persist a new impact-estimate record and return the created document.
export async function createImpactEstimate(
  organization: string,
  metric: string,
  segment: string | null,
  regex: string,
  value: number,
  users: number,
  metricTotal: number,
  query: string = "",
  queryLanguage: QueryLanguage = "none"
) {
  // Returning the promise directly is equivalent to awaiting it here
  return ImpactEstimateModel.create({
    id: uniqid("est_"),
    organization,
    metric,
    segment,
    regex,
    users,
    value,
    metricTotal,
    query,
    queryLanguage,
    dateCreated: new Date(),
  });
}
// Return a cached impact estimate if a recent one (< 30 days) exists for the
// same org/metric/segment/regex; otherwise run the estimation against the
// metric's datasource and cache the result. Returns null when the metric has
// no datasource. Throws on invalid regex or access violations.
export async function getImpactEstimate(
  organization: string,
  metric: string,
  regex: string,
  segment?: string
): Promise<ImpactEstimateDocument | null> {
  // Sanity check (no quotes allowed) — the regex is interpolated into queries
  if (!regex || regex.match(/['"]/g)) {
    throw new Error("Invalid page regex");
  }
  // Only re-use estimates that happened within the last 30 days
  const lastDate = new Date();
  lastDate.setDate(lastDate.getDate() - 30);
  const existing = await ImpactEstimateModel.findOne({
    organization,
    metric,
    segment: segment || null,
    regex,
    dateCreated: {
      $gt: lastDate,
    },
  });
  if (existing) {
    return existing;
  }
  const metricObj = await getMetricById(metric);
  if (!metricObj) {
    throw new Error("Metric not found");
  }
  if (metricObj.organization !== organization) {
    throw new Error("You don't have access to that metric");
  }
  if (!metricObj.datasource) {
    return null;
  }
  const datasource = await getDataSourceById(metricObj.datasource);
  if (!datasource) {
    throw new Error("Datasource not found");
  }
  if (datasource.organization !== organization) {
    throw new Error("You don't have access to that datasource");
  }
  // Fix: initialize explicitly so the variable is never read before being
  // assigned (the previous bare declaration was a use-before-assignment under
  // strict TypeScript when `segment` was falsy). Normalize findOne's null to
  // undefined to match the optional `segment` param of getImpactEstimation.
  let segmentObj: SegmentInterface | undefined = undefined;
  if (segment) {
    segmentObj =
      (await SegmentModel.findOne({
        id: segment,
        organization,
        datasource: datasource.id,
      })) || undefined;
  }
  const integration = getSourceIntegrationObject(datasource);
  const data = await integration.getImpactEstimation(
    regex,
    metricObj,
    segmentObj
  );
  return createImpactEstimate(
    organization,
    metric,
    segment || null,
    regex,
    data.value,
    data.users,
    data.metricTotal,
    data.query,
    integration.getSourceProperties().queryLanguage
  );
}

View File

@@ -0,0 +1,35 @@
import mongoose from "mongoose";
import { LearningInterface } from "../../types/insight";
// A learning/insight derived from experiment evidence, with votes.
const learningSchema = new mongoose.Schema({
id: String,
text: String,
details: String,
userId: String,
organization: String,
tags: [String],
// Experiments backing this learning
evidence: [
{
_id: false,
experimentId: String,
},
],
// Up/down votes; dir is the vote direction
votes: [
{
_id: false,
userId: String,
dir: Number,
dateCreated: Date,
},
],
status: String,
dateCreated: Date,
dateUpdated: Date,
});
export type LearningDocument = mongoose.Document & LearningInterface;
export const LearningModel = mongoose.model<LearningDocument>(
"Learning",
learningSchema
);

View File

@@ -0,0 +1,61 @@
import mongoose from "mongoose";
import { MetricInterface } from "../../types/metric";
import { queriesSchema } from "./QueryModel";
// A metric definition (how to measure it from the datasource) together with
// its most recent standalone analysis results.
const metricSchema = new mongoose.Schema({
id: String,
organization: String,
datasource: String,
name: String,
description: String,
// `type`/`table` wrapped in objects because they are reserved schema keys
type: { type: String },
table: { type: String },
column: String,
// Count metric events that occur shortly before experiment exposure
earlyStart: Boolean,
// True when lower values are better
inverse: Boolean,
ignoreNulls: Boolean,
// Optional per-user value cap
cap: Number,
dateCreated: Date,
dateUpdated: Date,
userIdColumn: String,
anonymousIdColumn: String,
userIdType: String,
timestampColumn: String,
// Extra row-level filters (column OPERATOR value)
conditions: [
{
_id: false,
column: String,
operator: String,
value: String,
},
],
queries: queriesSchema,
runStarted: Date,
// Latest analysis summary for this metric
analysis: {
createdAt: Date,
users: Number,
average: Number,
stddev: Number,
count: Number,
// p = percentile, v = value
percentiles: [
{
_id: false,
p: Number,
v: Number,
},
],
// d = date, v = value
dates: [
{
_id: false,
d: Date,
v: Number,
},
],
},
});
export type MetricDocument = mongoose.Document & MetricInterface;
export const MetricModel = mongoose.model<MetricDocument>(
"Metric",
metricSchema
);

View File

@@ -0,0 +1,52 @@
import mongoose from "mongoose";
import { OrganizationInterface } from "../../types/organization";
// An organization (tenant): membership, pending invites, billing info,
// external connections, and appearance settings.
const organizationSchema = new mongoose.Schema({
id: String,
url: String,
name: String,
ownerEmail: String,
// Current members and their roles
members: [
{
_id: false,
id: String,
role: String,
},
],
// Outstanding invites; `key` is the invite token emailed to the user
invites: [
{
_id: false,
email: String,
key: String,
dateCreated: Date,
role: String,
},
],
// Stripe billing state
stripeCustomerId: String,
subscription: {
id: String,
qty: Number,
trialEnd: Date,
status: String,
},
// Third-party integrations
connections: {
slack: {
team: String,
token: String,
},
},
// Org-level preferences and branding
settings: {
implementationTypes: [String],
customized: Boolean,
logoPath: String,
primaryColor: String,
secondaryColor: String,
},
});
export type OrganizationDocument = mongoose.Document & OrganizationInterface;
export const OrganizationModel = mongoose.model<OrganizationDocument>(
"Organization",
organizationSchema
);

View File

@@ -0,0 +1,33 @@
import mongoose from "mongoose";
import { PastExperimentsInterface } from "../../types/past-experiments";
import { queriesSchema } from "./QueryModel";
// Results of scanning a datasource for previously-run experiments that can
// be imported.
const pastExperimentsSchema = new mongoose.Schema({
id: String,
organization: String,
datasource: String,
// One entry per discovered experiment
experiments: [
{
_id: false,
trackingKey: String,
numVariations: Number,
variationKeys: [String],
// Observed traffic split between variations
weights: [Number],
users: Number,
startDate: Date,
endDate: Date,
},
],
runStarted: Date,
queries: queriesSchema,
dateCreated: Date,
dateUpdated: Date,
});
export type PastExperimentsDocument = mongoose.Document &
PastExperimentsInterface;
export const PastExperimentsModel = mongoose.model<PastExperimentsDocument>(
"PastExperiments",
pastExperimentsSchema
);

View File

@@ -0,0 +1,21 @@
import mongoose from "mongoose";
import { PresentationInterface } from "../../types/presentation";
// A slide-deck presentation built from a set of experiments.
const presentationSchema = new mongoose.Schema({
id: String,
userId: String,
organization: String,
title: String,
description: String,
// Free-form display options (untyped mixed object)
options: {},
// Experiments included in the deck, in order
experimentIds: [String],
dateCreated: Date,
dateUpdated: Date,
});
export type PresentationDocument = mongoose.Document & PresentationInterface;
export const PresentationModel = mongoose.model<PresentationDocument>(
"Presentation",
presentationSchema
);

View File

@@ -0,0 +1,30 @@
import mongoose from "mongoose";
import { QueryInterface } from "../../types/query";
// Embedded sub-schema reused by other models to reference the queries that
// produced their results (query text, status, display name).
export const queriesSchema = [
{
_id: false,
query: String,
status: String,
name: String,
},
];
// A single asynchronous query execution against a datasource, including
// lifecycle timestamps and its result or error.
const querySchema = new mongoose.Schema({
id: String,
organization: String,
datasource: String,
language: String,
query: String,
status: String,
createdAt: Date,
startedAt: Date,
finishedAt: Date,
// Updated periodically while running so stalled queries can be detected
heartbeat: Date,
// Untyped mixed object holding the query output
result: {},
error: String,
});
export type QueryDocument = mongoose.Document & QueryInterface;
export const QueryModel = mongoose.model<QueryDocument>("Query", querySchema);

View File

@@ -0,0 +1,69 @@
import mongoose from "mongoose";
// Rows returned by a report query, plus when they were fetched.
export interface QueryResult {
  timestamp: Date;
  rows: {
    [key: string]: string;
  }[];
}
export type VisualizationOptions = Record<string, unknown>;
// A chart rendered from a query's result rows.
export interface Visualization {
  title: string;
  type: string;
  xAxis?: string[];
  yAxis?: string[];
  color?: string;
  options?: VisualizationOptions;
}
// A single query within a report and how it should be displayed.
export interface Query {
  datasource: string;
  query: string;
  showTable: boolean;
  visualizations: Visualization[];
}
// A report: a titled collection of queries and visualizations.
export interface ReportInterface {
  id: string;
  organization: string;
  title: string;
  description: string;
  queries: Query[];
  dateCreated: Date;
  dateUpdated: Date;
}
export type ReportDocument = mongoose.Document & ReportInterface;
const reportSchema = new mongoose.Schema({
  id: String,
  organization: String,
  title: String,
  description: String,
  queries: [
    {
      _id: false,
      datasource: String,
      query: String,
      showTable: Boolean,
      visualizations: [
        {
          _id: false,
          title: String,
          // "type" is reserved in mongoose schema definitions, hence the
          // explicit `{ type: String }` form
          type: { type: String },
          xAxis: [String],
          yAxis: [String],
          color: String,
          options: {},
        },
      ],
    },
  ],
  dateCreated: Date,
  dateUpdated: Date,
});
export const ReportModel = mongoose.model<ReportDocument>(
  "Report",
  reportSchema
);

View File

@@ -0,0 +1,67 @@
import mongoose from "mongoose";
import { SegmentComparisonInterface } from "../../types/segment-comparison";
import { queriesSchema } from "./QueryModel";
// Compares metric values between two user segments over date ranges.
const segmentComparisonSchema = new mongoose.Schema({
  id: String,
  organization: String,
  title: String,
  datasource: String,
  metrics: [String],
  conversionWindow: Number,
  segment1: {
    segment: String,
    from: Date,
    to: Date,
  },
  segment2: {
    segment: String,
    // Presumably makes segment2 reuse segment1's date range — confirm
    sameDateRange: Boolean,
    from: Date,
    to: Date,
  },
  runStarted: Date,
  // Background queries that produced `results`
  queries: queriesSchema,
  results: {
    users: {
      segment1: Number,
      segment2: Number,
    },
    // Keyed by metric id; each value holds per-segment stats
    metrics: {
      type: Map,
      of: {
        _id: false,
        segment1: {
          value: Number,
          cr: Number,
          users: Number,
        },
        segment2: {
          value: Number,
          cr: Number,
          users: Number,
          // Two-element interval — confirm ordering against stats code
          ci: [Number],
          expected: Number,
          // Distribution points for charting
          buckets: [
            {
              _id: false,
              x: Number,
              y: Number,
            },
          ],
          chanceToWin: Number,
        },
      },
    },
  },
  dateCreated: Date,
  dateUpdated: Date,
});
export type SegmentComparisonDocument = mongoose.Document &
  SegmentComparisonInterface;
export const SegmentComparisonModel = mongoose.model<SegmentComparisonDocument>(
  "SegmentComparison",
  segmentComparisonSchema
);

View File

@@ -0,0 +1,19 @@
import mongoose from "mongoose";
import { SegmentInterface } from "../../types/segment";
// A saved user segment, defined by SQL against a datasource.
const segmentSchema = new mongoose.Schema({
  id: String,
  organization: String,
  datasource: String,
  name: String,
  sql: String,
  dateCreated: Date,
  dateUpdated: Date,
  targeting: String,
});
export type SegmentDocument = mongoose.Document & SegmentInterface;
export const SegmentModel = mongoose.model<SegmentDocument>(
  "Segment",
  segmentSchema
);

View File

@@ -0,0 +1,11 @@
import mongoose from "mongoose";
import { TagInterface } from "../../types/tag";
// One document per organization holding its full list of tag names.
const tagSchema = new mongoose.Schema({
  organization: String,
  tags: [String],
});
export type TagDocument = mongoose.Document & TagInterface;
export const TagModel = mongoose.model<TagDocument>("Tag", tagSchema);

View File

@@ -0,0 +1,52 @@
import mongoose from "mongoose";
// A single event property observed in the tracking table.
export interface Property {
  name: string;
  type: string;
  lastSeen: Date;
}
// An event observed in the tracking table, with its known properties.
export interface EventInterface {
  name: string;
  lastSeen: Date;
  properties: Property[];
}
// Cached metadata about a datasource's event tracking table.
export interface TrackTableInterface {
  id: string;
  datasource: string;
  table: string;
  dateCreated: Date;
  dateUpdated: Date;
  events: EventInterface[];
}
export type TrackTableDocument = mongoose.Document & TrackTableInterface;
const trackTableSchema = new mongoose.Schema({
  id: String,
  datasource: String,
  table: String,
  dateCreated: Date,
  dateUpdated: Date,
  events: [
    {
      _id: false,
      name: String,
      // Fix: was misspelled "lastSeend", which didn't match the
      // `lastSeen` field declared on EventInterface, so the value would
      // be stripped by mongoose on save
      lastSeen: Date,
      properties: [
        {
          _id: false,
          name: String,
          // "type" is reserved in mongoose schema definitions, hence the
          // explicit `{ type: String }` form
          type: { type: String },
          lastSeen: Date,
        },
      ],
    },
  ],
});
export const TrackTableModel = mongoose.model<TrackTableDocument>(
  "TrackTable",
  trackTableSchema
);

View File

@@ -0,0 +1,14 @@
import mongoose from "mongoose";
import { UserInterface } from "../../types/user";
// Application user account.
const userSchema = new mongoose.Schema({
  id: String,
  name: String,
  email: String,
  // Presumably only populated for self-hosted (local auth) deployments,
  // since cloud uses Auth0 — confirm
  passwordHash: String,
  // Site-wide admin flag (distinct from per-organization member roles)
  admin: Boolean,
});
export type UserDocument = mongoose.Document & UserInterface;
export const UserModel = mongoose.model<UserDocument>("User", userSchema);

View File

@@ -0,0 +1,12 @@
import mongoose from "mongoose";
import { WatchInterface } from "../../types/watch";
// Tracks which experiments a user is watching within an organization
// (used to scope audit/event notifications to watched experiments).
const watchSchema = new mongoose.Schema({
  userId: String,
  organization: String,
  experiments: [String],
});
export type WatchDocument = mongoose.Document & WatchInterface;
export const WatchModel = mongoose.model<WatchDocument>("Watch", watchSchema);

View File

@@ -0,0 +1,15 @@
import app, { init } from "./app";
// TODO: await this before calling listen() so the server only accepts
// traffic once initialization has finished
// Fix: handle init() failures instead of leaving a floating promise
// (previously an unhandled rejection); exit so a process manager can
// restart the service in a clean state.
init().catch((err) => {
  console.error(err);
  process.exit(1);
});
const server = app.listen(app.get("port"), () => {
  console.log(
    " Back-end is running at http://localhost:%d in %s mode",
    app.get("port"),
    app.get("env")
  );
  console.log(" Press CTRL-C to stop\n");
});
export default server;

View File

@@ -0,0 +1,45 @@
import uniqid from "uniqid";
import { ApiKeyModel } from "../models/ApiKeyModel";
import md5 from "md5";
// Generate, persist, and return a new API key for an organization.
// The key is "key_" plus 16 hex chars derived from a unique id.
export async function createApiKey(
  organization: string,
  description?: string
): Promise<string> {
  // Fix: String.prototype.substr is deprecated; slice is the supported
  // equivalent for a (start, end) range
  const key = "key_" + md5(uniqid()).slice(0, 16);
  await ApiKeyModel.create({
    organization,
    key,
    description,
    dateCreated: new Date(),
  });
  return key;
}
// Delete a single API key, scoped to the organization that owns it.
export async function deleteByOrganizationAndApiKey(
  organization: string,
  key: string
) {
  await ApiKeyModel.deleteOne({ organization, key });
}
// Resolve an API key to its owning organization id.
// Returns null when the key is unknown (or has no organization set).
// Fix: declared return type was Promise<string> but the function
// returns null on a miss — widened to Promise<string | null>.
export async function lookupOrganizationByApiKey(
  key: string
): Promise<string | null> {
  const doc = await ApiKeyModel.findOne({
    key,
  });
  if (!doc) return null;
  return doc.organization || null;
}
// List every API key document belonging to an organization.
export async function getAllApiKeysByOrganization(organization: string) {
  const keys = await ApiKeyModel.find({ organization });
  return keys;
}

View File

@@ -0,0 +1,96 @@
import { Athena } from "aws-sdk";
import { ResultSet } from "aws-sdk/clients/athena";
import { AthenaConnectionParams } from "../../types/integrations/athena";
// Run a SQL query on AWS Athena and return every result row as an object
// keyed by column name. Polls for completion every 500ms and cancels the
// query (then throws) if it hasn't finished within 5 minutes.
export async function runAthenaQuery<T>(
  conn: AthenaConnectionParams,
  sql: string
): Promise<T[]> {
  const {
    database,
    bucketUri,
    workGroup,
    accessKeyId,
    secretAccessKey,
    region,
  } = conn;
  const athena = new Athena({
    accessKeyId,
    secretAccessKey,
    region,
  });
  // Athena runs queries asynchronously: start it and keep the execution id
  const { QueryExecutionId } = await athena
    .startQueryExecution({
      QueryString: sql,
      QueryExecutionContext: {
        Database: database,
      },
      ResultConfiguration: {
        // Results written to S3 are encrypted at rest
        EncryptionConfiguration: {
          EncryptionOption: "SSE_S3",
        },
        OutputLocation: bucketUri,
      },
      WorkGroup: workGroup || "primary",
    })
    .promise();
  // One polling step: wait 500ms, then check status. Resolves `false`
  // while the query is still running, resolves with the ResultSet on
  // success, rejects if the query failed.
  const waitAndCheck = () => {
    return new Promise<false | ResultSet>((resolve, reject) => {
      setTimeout(() => {
        athena
          .getQueryExecution({ QueryExecutionId })
          .promise()
          .then((resp) => {
            const {
              QueryExecution: {
                Status: { State, StateChangeReason },
              },
            } = resp;
            if (State === "RUNNING") {
              resolve(false);
            } else if (State === "FAILED") {
              reject(new Error(StateChangeReason));
            } else {
              athena
                .getQueryResults({ QueryExecutionId })
                .promise()
                .then(({ ResultSet }) => {
                  resolve(ResultSet);
                })
                .catch((e) => {
                  console.error(e);
                  reject(e);
                });
            }
          })
          .catch((e) => {
            console.error(e);
            reject(e);
          });
      }, 500);
    });
  };
  // Timeout after 300 seconds
  for (let i = 0; i < 600; i++) {
    const result = await waitAndCheck();
    if (result) {
      // Map rows to objects using the column names from the metadata;
      // slice(1) skips Athena's header row
      const keys = result.ResultSetMetadata.ColumnInfo.map((info) => info.Name);
      return result.Rows.slice(1).map((row) => {
        // eslint-disable-next-line
        const obj: any = {};
        row.Data.forEach((value, i) => {
          // Missing cells come back without VarCharValue; store null
          obj[keys[i]] = value.VarCharValue || null;
        });
        return obj;
      });
    }
  }
  // Cancel the query if it reaches this point
  await athena.stopQueryExecution({ QueryExecutionId }).promise();
  throw new Error("Query timed out after 5 minutes");
}

View File

@@ -0,0 +1,94 @@
import { AuditModel } from "../models/AuditModel";
import { AuditInterface } from "../../types/audit";
import uniqid from "uniqid";
import { WatchModel } from "../models/WatchModel";
import { QueryOptions } from "mongoose";
// Write a new audit log entry with a generated "aud_" id.
export function insertAudit(data: Partial<AuditInterface>) {
  const record = {
    ...data,
    id: uniqid("aud_"),
  };
  return AuditModel.create(record);
}
// All audit entries for an organization.
export async function findByOrganization(
  organization: string,
  options?: QueryOptions
) {
  const filter = { organization };
  // NOTE(review): `options` is passed in Model.find's projection slot
  // (second argument) rather than the options slot — confirm intended
  return AuditModel.find(filter, options);
}
// Audit entries attached directly to a given entity (type + id).
export async function findByEntity(
  type: string,
  id: string,
  options?: QueryOptions
) {
  const filter = {
    "entity.object": type,
    "entity.id": id,
  };
  // NOTE(review): `options` occupies Model.find's projection slot — confirm
  return AuditModel.find(filter, options);
}
// Audit entries whose parent object matches the given type + id.
export async function findByEntityParent(
  type: string,
  id: string,
  options?: QueryOptions
) {
  const filter = {
    "parent.object": type,
    "parent.id": id,
  };
  // NOTE(review): `options` occupies Model.find's projection slot — confirm
  return AuditModel.find(filter, options);
}
// Audit entries created by a specific user.
export async function findByUserId(userId: string, options?: QueryOptions) {
  const filter = { "user.id": userId };
  // NOTE(review): `options` occupies Model.find's projection slot — confirm
  return AuditModel.find(filter, options);
}
// Recent experiment lifecycle events (start/stop/phase/results) for the
// experiments a user is watching. Returns [] when the user watches nothing.
export async function getWatchedAudits(
  userId: string,
  organization: string,
  options?: QueryOptions
) {
  // Look up which experiments this user is watching
  const doc = await WatchModel.findOne({
    userId,
    organization,
  });
  if (!doc) {
    return [];
  }
  return AuditModel.find({
    organization,
    "entity.object": "experiment",
    "entity.id": {
      $in: doc.experiments,
    },
    event: {
      $in: [
        "experiment.start",
        "experiment.stop",
        "experiment.phase",
        "experiment.results",
      ],
    },
  })
    .sort({
      dateCreated: -1,
    })
    // Fix: `options` is optional, so `options.limit` threw a TypeError
    // whenever the caller omitted it; default to the 50 most recent
    .limit(options?.limit || 50);
}

View File

@@ -0,0 +1,133 @@
import { IS_CLOUD, JWT_SECRET } from "../util/secrets";
import jwt from "express-jwt";
import jwks from "jwks-rsa";
import { NextFunction, Response } from "express";
import { AuthRequest } from "../types/AuthRequest";
import { UserDocument } from "../models/UserModel";
import {
getOrganizationById,
getPermissionsByRole,
getRole,
} from "./organizations";
import { MemberRole } from "../../types/organization";
import { AuditInterface } from "../../types/audit";
import { insertAudit } from "./audit";
import { getUserByEmail, getUserById } from "./users";
// Self-hosted deployments use local auth
// Build the express-jwt middleware for self-hosted deployments, which
// sign their own HS256 tokens with the JWT_SECRET environment variable.
function getLocalJWTCheck() {
  if (!JWT_SECRET) {
    throw new Error("Must specify JWT_SECRET environment variable");
  }
  return jwt({
    secret: JWT_SECRET,
    audience: "https://api.growthbook.io",
    issuer: "https://api.growthbook.io",
    algorithms: ["HS256"],
  });
}
// Resolve the local-auth JWT subject ("sub" claim) to a user document.
function getUserFromLocalJWT(user: {
  sub: string;
}): Promise<UserDocument> {
  return getUserById(user.sub);
}
// Managed cloud deployment uses Auth0,
// Build the express-jwt middleware for the managed cloud deployment,
// validating RS256 tokens against Auth0's published JWKS keys.
function getAuth0JWTCheck() {
  return jwt({
    secret: jwks.expressJwtSecret({
      cache: true,
      rateLimit: true,
      jwksRequestsPerMinute: 5,
      jwksUri: "https://growthbook.auth0.com/.well-known/jwks.json",
    }),
    audience: "https://api.growthbook.io",
    issuer: "https://growthbook.auth0.com/",
    algorithms: ["RS256"],
  });
}
// Auth0 delivers the user's email in a custom namespaced claim;
// resolve it to a user document.
function getUserFromAuth0JWT(user: {
  "https://growthbook.io/email": string;
}): Promise<UserDocument> {
  const email = user["https://growthbook.io/email"];
  return getUserByEmail(email);
}
// Pick the JWT validation middleware for this deployment type.
export function getJWTCheck() {
  if (IS_CLOUD) {
    return getAuth0JWTCheck();
  }
  return getLocalJWTCheck();
}
// Express middleware that runs after the JWT check: loads the user and
// (optionally) organization onto the request, enforces org membership,
// and attaches permission info plus an audit-logging helper.
export async function processJWT(
  // eslint-disable-next-line
  req: AuthRequest & { user: any },
  res: Response,
  next: NextFunction
) {
  req.email = "";
  req.permissions = {};
  // The decoded JWT payload shape differs between cloud (Auth0) and local auth
  const user = await (IS_CLOUD
    ? getUserFromAuth0JWT(req.user)
    : getUserFromLocalJWT(req.user));
  if (user) {
    req.email = user.email;
    req.userId = user.id;
    req.name = user.name;
    req.admin = !!user.admin;
    // The client selects its organization context via a request header
    if (req.headers["x-organization"]) {
      req.organization = await getOrganizationById(
        "" + req.headers["x-organization"]
      );
      if (req.organization) {
        // Make sure member is part of the organization
        // (site admins bypass this check)
        if (
          !req.admin &&
          !req.organization.members.filter((m) => m.id === req.userId).length
        ) {
          return res.status(403).json({
            status: 403,
            message: "You do not have access to that organization",
          });
        }
        // Site admins always get the "admin" role's permissions
        const role: MemberRole = req.admin
          ? "admin"
          : getRole(req.organization, req.userId);
        req.permissions = getPermissionsByRole(role);
      } else {
        return res.status(404).json({
          status: 404,
          message: "Organization not found",
        });
      }
    }
    // Convenience helper so route handlers can write audit log entries
    // stamped with the current user and organization
    req.audit = async (data: Partial<AuditInterface>) => {
      await insertAudit({
        ...data,
        user: {
          id: req.userId,
          email: req.email,
          name: user.name,
        },
        organization: req.organization?.id,
        dateCreated: new Date(),
      });
    };
  } else {
    // No user could be resolved; auditing is impossible for this request
    req.audit = async () => {
      throw new Error("No user in request");
    };
  }
  next();
}
// Enforce the minimum password policy; throws when the password is too short.
export function validatePasswordFormat(password: string): void {
  const MIN_LENGTH = 8;
  if (password.length >= MIN_LENGTH) {
    return;
  }
  throw new Error("Password must be at least 8 characters.");
}

View File

@@ -0,0 +1,121 @@
import uniqid from "uniqid";
import { AES, enc } from "crypto-js";
import { ENCRYPTION_KEY } from "../util/secrets";
import GoogleAnalytics, {
getOauth2Client,
} from "../integrations/GoogleAnalytics";
import Athena from "../integrations/Athena";
import Redshift from "../integrations/Redshift";
import Snowflake from "../integrations/Snowflake";
import Postgres from "../integrations/Postgres";
import { SourceIntegrationInterface } from "../types/Integration";
import BigQuery from "../integrations/BigQuery";
import Mixpanel from "../integrations/Mixpanel";
import { DataSourceModel } from "../models/DataSourceModel";
import {
DataSourceInterface,
DataSourceParams,
DataSourceSettings,
DataSourceType,
} from "../../types/datasource";
import { GoogleAnalyticsParams } from "../../types/integrations/googleanalytics";
// All data sources belonging to an organization.
export async function getDataSourcesByOrganization(organization: string) {
  const docs = await DataSourceModel.find({ organization });
  return docs;
}
// Fetch a single data source by its "ds_" id (null when not found).
export async function getDataSourceById(id: string) {
  const doc = await DataSourceModel.findOne({ id });
  return doc;
}
// Decrypt AES-encrypted connection params back into their object form.
// Inverse of encryptParams().
export function decryptDataSourceParams<T = DataSourceParams>(
  encrypted: string
): T {
  return JSON.parse(AES.decrypt(encrypted, ENCRYPTION_KEY).toString(enc.Utf8));
}
// Serialize and AES-encrypt connection params for storage at rest.
export function encryptParams(params: DataSourceParams): string {
  return AES.encrypt(JSON.stringify(params), ENCRYPTION_KEY).toString();
}
// Decrypt the stored params, overlay any new values, and re-encrypt.
export function mergeAndEncryptParams(
  newParams: Partial<DataSourceParams>,
  existingParams: string
): string {
  const merged = Object.assign(
    decryptDataSourceParams(existingParams),
    newParams
  );
  return encryptParams(merged);
}
// Instantiate the integration class matching the datasource's type and
// stamp it with the owning organization and datasource id.
// Throws on an unrecognized type.
export function getSourceIntegrationObject(datasource: DataSourceInterface) {
  const { type, params, settings } = datasource;
  let obj: SourceIntegrationInterface;
  switch (type) {
    case "athena":
      obj = new Athena(params, settings);
      break;
    case "redshift":
      obj = new Redshift(params, settings);
      break;
    case "google_analytics":
      obj = new GoogleAnalytics(params, settings);
      break;
    case "snowflake":
      obj = new Snowflake(params, settings);
      break;
    case "postgres":
      obj = new Postgres(params, settings);
      break;
    case "bigquery":
      obj = new BigQuery(params, settings);
      break;
    case "mixpanel":
      obj = new Mixpanel(params, settings);
      break;
    default:
      throw new Error("Unknown data source type: " + type);
  }
  obj.organization = datasource.organization;
  obj.datasource = datasource.id;
  return obj;
}
// Instantiate the integration for this datasource and verify connectivity.
// Resolves on success; throws if the connection test fails.
export async function testDataSourceConnection(
  datasource: DataSourceInterface
) {
  await getSourceIntegrationObject(datasource).testConnection();
}
// Create (and persist) a new datasource after verifying the connection.
// For Google Analytics, exchanges the supplied OAuth value for a
// long-lived refresh token before saving.
export async function createDataSource(
  organization: string,
  name: string,
  type: DataSourceType,
  params: DataSourceParams,
  settings?: DataSourceSettings
) {
  const id = uniqid("ds_");
  if (type === "google_analytics") {
    // NOTE(review): `refreshToken` appears to carry an OAuth authorization
    // code at this point, which getToken() exchanges for real tokens —
    // confirm against the front-end flow
    const oauth2Client = getOauth2Client();
    const { tokens } = await oauth2Client.getToken(
      (params as GoogleAnalyticsParams).refreshToken
    );
    (params as GoogleAnalyticsParams).refreshToken = tokens.refresh_token;
  }
  const datasource: DataSourceInterface = {
    id,
    name,
    organization,
    type,
    settings,
    dateCreated: new Date(),
    dateUpdated: new Date(),
    // Connection params are encrypted at rest
    params: encryptParams(params),
  };
  // Test the connection and create in the database
  await testDataSourceConnection(datasource);
  const model = await DataSourceModel.create(datasource);
  return model;
}

View File

@@ -0,0 +1,79 @@
import uniqid from "uniqid";
import { Comment, DiscussionParentType } from "../../types/discussion";
import { DiscussionModel } from "../models/DiscussionModel";
// Look up the (single) discussion thread attached to a parent object.
export async function getDiscussionByParent(
  organization: string,
  parentType: DiscussionParentType,
  parentId: string
) {
  const filter = { organization, parentType, parentId };
  return await DiscussionModel.findOne(filter);
}
// Every discussion thread in an organization.
export async function getAllDiscussionsByOrg(organization: string) {
  const docs = await DiscussionModel.find({ organization });
  return docs;
}
// Discussion threads in an organization updated on or after `date`.
export async function getAllDiscussionsByOrgFromDate(
  organization: string,
  date: Date
) {
  const filter = {
    organization,
    dateUpdated: { $gte: date },
  };
  return await DiscussionModel.find(filter);
}
// The `num` most recently updated discussion threads in an organization.
export async function getLastNDiscussions(organization: string, num: number) {
  const query = DiscussionModel.find({ organization })
    .sort({ dateUpdated: -1 })
    .limit(num);
  return await query;
}
// Add a comment to the discussion thread for a parent object,
// creating the thread first if it doesn't exist yet.
export async function addComment(
  organization: string,
  parentType: DiscussionParentType,
  parentId: string,
  user: { id: string; email: string; name: string },
  comment: string
) {
  const newComment: Comment = {
    content: comment,
    date: new Date(),
    userEmail: user.email,
    userId: user.id,
    userName: user.name,
  };
  const discussion = await getDiscussionByParent(
    organization,
    parentType,
    parentId
  );
  // Comment thread already exists
  if (discussion && discussion.id) {
    discussion.comments.push(newComment);
    discussion.dateUpdated = new Date();
    // In-place array mutation isn't detected by mongoose automatically,
    // so flag the path explicitly before saving
    discussion.markModified("comments");
    await discussion.save();
    return;
  }
  // Doesn't exist, create it
  await DiscussionModel.create({
    id: uniqid("com_"),
    organization,
    parentType,
    parentId,
    comments: [newComment],
    dateUpdated: new Date(),
  });
}

View File

@@ -0,0 +1,150 @@
import {
EMAIL_ENABLED,
EMAIL_USE_TLS,
EMAIL_FROM,
EMAIL_HOST,
EMAIL_HOST_PASSWORD,
EMAIL_HOST_USER,
EMAIL_PORT,
SITE_MANAGER_EMAIL,
APP_ORIGIN,
} from "../util/secrets";
import nodemailer from "nodemailer";
import nunjucks from "nunjucks";
import { OrganizationDocument } from "../models/OrganizationModel";
import { getEmailFromUserId, getInviteUrl } from "./organizations";
import path from "path";
// True only when SMTP is enabled and every required setting is present.
export function isEmailEnabled(): boolean {
  return !!(
    EMAIL_ENABLED &&
    EMAIL_HOST &&
    EMAIL_PORT &&
    EMAIL_HOST_USER &&
    EMAIL_HOST_PASSWORD &&
    EMAIL_FROM
  );
}
// Email templates live in the sibling templates/email directory and use
// nunjucks (Jinja-compatible) syntax with auto-escaping on
nunjucks.configure(path.join(__dirname, "..", "templates", "email"), {
  autoescape: true,
});
// Single shared SMTP transport; null when email isn't configured
const transporter = isEmailEnabled()
  ? nodemailer.createTransport({
      host: EMAIL_HOST,
      port: EMAIL_PORT,
      secure: EMAIL_USE_TLS,
      auth: {
        user: EMAIL_HOST_USER,
        pass: EMAIL_HOST_PASSWORD,
      },
    })
  : null;
// Send an email through the configured SMTP transport.
// Throws when email isn't configured, or when the send fails
// (after logging the underlying error).
async function sendMail({
  html,
  subject,
  to,
  text,
}: {
  html: string;
  subject: string;
  to: string;
  text: string;
}) {
  if (!isEmailEnabled()) {
    throw new Error("Email server not configured");
  }
  const message = {
    from: `"Growth Book" <${EMAIL_FROM}>`,
    to,
    subject,
    text,
    html,
  };
  try {
    await transporter.sendMail(message);
  } catch (e) {
    console.error(e);
    throw e;
  }
}
// Send an invitation email for the invite identified by `key`.
// Throws if the key doesn't match a pending invite on the organization.
export async function sendInviteEmail(
  organization: OrganizationDocument,
  key: string
) {
  // Idiom: find() locates the first match directly instead of filter()[0]
  const invite = organization.invites.find((invite) => invite.key === key);
  if (!invite) {
    throw new Error("Could not find invite with specified key");
  }
  const inviteUrl = getInviteUrl(key);
  const html = nunjucks.render("invite.jinja", {
    inviteUrl,
    organizationName: organization.name,
  });
  await sendMail({
    html,
    subject: `You've been invited to join ${organization.name} on Growth Book`,
    to: invite.email,
    text: `Join ${organization.name} on Growth Book by visiting ${inviteUrl}`,
  });
}
// Email each of the given users a summary of metric changes for an
// experiment, with a link to its results page.
export async function sendExperimentChangesEmail(
  userIds: string[],
  experimentId: string,
  experimentName: string,
  experimentChanges: string[]
) {
  const experimentUrl = APP_ORIGIN + "experiment/" + experimentId + "#results";
  const html = nunjucks.render("experiment-changes.jinja", {
    experimentChanges,
    experimentUrl,
    experimentName,
  });
  const subject = `Experiment Change for: ${experimentName}`;
  await Promise.all(
    userIds.map(async (id) => {
      const email = await getEmailFromUserId(id);
      await sendMail({
        html,
        subject,
        to: email,
        text:
          `The experiment '${experimentName}' has the following metric changes:` +
          // Fix: the first bullet was missing its leading newline, so it
          // ran into the end of the sentence in the plaintext body
          "\n- " +
          experimentChanges.join("\n- ") +
          `\n\nSee more details at ${experimentUrl}`,
      });
    })
  );
}
// Email a password-reset link to the given address.
export async function sendResetPasswordEmail(email: string, resetUrl: string) {
  const text = `Reset your password by visiting ${resetUrl}`;
  const html = nunjucks.render("reset-password.jinja", { resetUrl });
  await sendMail({
    html,
    subject: "Reset Growth Book Password",
    to: email,
    text,
  });
}
// Notify the site manager (when configured) that a new organization
// was created; silently does nothing otherwise.
export async function sendNewOrgEmail(company: string, email: string) {
  if (!SITE_MANAGER_EMAIL) {
    return;
  }
  await sendMail({
    html: nunjucks.render("new-organization.jinja", { company, email }),
    subject: `New company created: ${company}`,
    to: SITE_MANAGER_EMAIL,
    text: `Company Name: ${company}\nOwner Email: ${email}`,
  });
}

View File

@@ -0,0 +1,66 @@
/*
import {TrackTableModel} from "../models/TrackTableModel";
import {DataSourceInterface} from "../models/DataSourceModel";
import {getLatestEvents} from "./datasource";
import unionBy from "lodash/unionBy";
import uniqid from "uniqid";
export async function getTrackTableByDataSource(datasource: string) {
return TrackTableModel.findOne({
datasource
});
}
export async function getTrackTableByDataSources(datasources: string[]) {
return TrackTableModel.find({
datasource: {$in: datasources}
});
}
export async function syncTrackTable(datasource: DataSourceInterface) {
// Get existing model
const trackTable = await getTrackTableByDataSource(datasource.id);
if (trackTable) {
// Get latest events from the data warehouse and build a hash lookup map
const events = await getLatestEvents(datasource, trackTable.table, trackTable.dateUpdated);
const newEventsMap = new Map();
events.forEach(event => {
newEventsMap.set(event.name, event);
});
// Update any existing events
trackTable.events.forEach((event) => {
if (newEventsMap.has(event.name)) {
const newEvent = newEventsMap.get(event.name);
event.properties = unionBy(newEvent.properties, event.properties, "name");
event.lastSeen = newEvent.lastSeen;
}
});
// Add any new records not already in trackTable
const existingEvents = trackTable.events.map(event => event.name);
events.forEach(event => {
if (!existingEvents.includes(event.name)) {
trackTable.events.push(event);
}
});
trackTable.markModified("events");
trackTable.set("dateUpdated", new Date());
await trackTable.save();
return trackTable;
}
else {
const events = await getLatestEvents(datasource, "tracks", null);
return await TrackTableModel.create({
id: uniqid("tr_"),
datasource: datasource.id,
table: "tracks",
dateCreated: new Date(),
dateUpdated: new Date(),
events
});
}
}
*/

View File

@@ -0,0 +1,632 @@
import { ExperimentModel } from "../models/ExperimentModel";
import {
SnapshotVariation,
ExperimentSnapshotInterface,
} from "../../types/experiment-snapshot";
import { MetricModel } from "../models/MetricModel";
import uniqid from "uniqid";
import {
binomialABTest,
srm,
ABTestStats,
countABTest,
bootstrapABTest,
getValueCR,
} from "./stats";
import { getSourceIntegrationObject } from "./datasource";
import { addTags } from "./tag";
import { WatchModel } from "../models/WatchModel";
import { QueryMap } from "./queries";
import { PastExperimentResult } from "../types/Integration";
import { ExperimentSnapshotModel } from "../models/ExperimentSnapshotModel";
import { MetricInterface } from "../../types/metric";
import { ExperimentInterface } from "../../types/experiment";
import { DimensionInterface } from "../../types/dimension";
import { DataSourceInterface } from "../../types/datasource";
import { PastExperiment } from "../../types/past-experiments";
// All experiments belonging to an organization.
export function getExperimentsByOrganization(organization: string) {
  return ExperimentModel.find({ organization });
}
export async function getExperimentById(id: string) {
const experiment = await ExperimentModel.findOne({
id,
});
return experiment;
}
// Tracking keys are only unique within an organization, so both are
// required to identify the experiment.
export function getExperimentByTrackingKey(
  organization: string,
  trackingKey: string
) {
  const filter = { organization, trackingKey };
  return ExperimentModel.findOne(filter);
}
// Batch-fetch experiments by their ids.
export async function getExperimentsByIds(ids: string[]) {
  return await ExperimentModel.find({ id: { $in: ids } });
}
// Permanently remove an experiment document by id.
export function deleteExperimentById(id: string) {
  return ExperimentModel.deleteOne({ id });
}
// Permanently remove a metric document by id.
export function deleteMetricById(id: string) {
  return MetricModel.deleteOne({ id });
}
// Shape of snapshots written before results were grouped by dimension
// (srm/variations lived at the top level); used only by the
// backwards-compatibility path in getLatestSnapshot.
type OldSnapshotModel = ExperimentSnapshotInterface & {
  srm: number;
  variations: SnapshotVariation[];
};
// Fetch the most recent snapshot for an experiment phase (and optional
// dimension breakdown). Returns undefined when none exist.
export async function getLatestSnapshot(
  experiment: string,
  phase: number,
  dimension?: string
) {
  const query = { experiment, phase, dimension: dimension || null };
  const all = await ExperimentSnapshotModel.find(query, null, {
    sort: { dateCreated: -1 },
    limit: 1,
  }).exec();
  // Backwards compatibility with old data format (can remove later):
  // old snapshots stored srm/variations at the top level rather than in
  // a `results` array, so convert on the fly
  if (all[0] && !all[0].results[0]) {
    const old = all[0].toJSON() as OldSnapshotModel;
    all[0].results = [
      {
        name: "All",
        srm: old.srm,
        variations: old.variations,
      },
    ];
  }
  return all[0];
}
// All metrics defined by an organization.
export function getMetricsByOrganization(organization: string) {
  return MetricModel.find({ organization });
}
// Metrics that read from a specific datasource.
export function getMetricsByDatasource(datasource: string) {
  return MetricModel.find({ datasource });
}
// Fetch a single metric by its "met_" id (null when not found).
export function getMetricById(id: string) {
  return MetricModel.findOne({ id });
}
// Insert a new metric with a generated "met_" id and creation timestamps.
export async function createMetric(data: Partial<MetricInterface>) {
  const now = new Date();
  return MetricModel.create({
    ...data,
    id: uniqid("met_"),
    dateCreated: now,
    dateUpdated: now,
  });
}
// Derive a URL/key-friendly slug from an experiment name.
// The leading "-" lets the stopword pattern match the very first word;
// leftover hyphens are collapsed and trimmed at the end.
function generateTrackingKey(name: string, n: number): string {
  const transforms: [RegExp, string][] = [
    // Replace whitespace with hyphen
    [/\s+/g, "-"],
    // Get rid of all non alpha-numeric characters
    [/[^a-z0-9\-_]*/g, ""],
    // Remove stopwords
    [
      /-((a|about|above|after|again|all|am|an|and|any|are|arent|as|at|be|because|been|before|below|between|both|but|by|cant|could|did|do|does|dont|down|during|each|few|for|from|had|has|have|having|here|how|if|in|into|is|isnt|it|its|itself|more|most|no|nor|not|of|on|once|only|or|other|our|out|over|own|same|should|shouldnt|so|some|such|that|than|then|the|there|theres|these|this|those|through|to|too|under|until|up|very|was|wasnt|we|weve|were|what|whats|when|where|which|while|who|whos|whom|why|with|wont|would)-)+/g,
      "-",
    ],
    // Collapse duplicate hyphens
    [/-{2,}/g, "-"],
    // Remove leading and trailing hyphens
    [/(^-|-$)/g, ""],
  ];
  let key = transforms.reduce(
    (slug, [pattern, replacement]) => slug.replace(pattern, replacement),
    ("-" + name).toLowerCase()
  );
  // Append the attempt number when this isn't the first try
  if (n > 1) {
    key += "-" + n;
  }
  return key;
}
// Create a new experiment with a unique tracking key.
// Throws when an explicitly-supplied trackingKey collides with an
// existing experiment in the same organization.
export async function createExperiment(data: Partial<ExperimentInterface>) {
  if (data.trackingKey) {
    // Make sure id is unique
    const existing = await getExperimentByTrackingKey(
      data.organization,
      data.trackingKey
    );
    if (existing) {
      throw new Error(
        "Error: Duplicate tracking key. Please choose something else"
      );
    }
  } else {
    // Try to generate a unique tracking key based on the experiment name
    // (up to 9 attempts: "name", then "name-2" ... "name-9")
    let n = 1;
    let found = null;
    while (n < 10 && !found) {
      const key = generateTrackingKey(data.name, n);
      if (!(await getExperimentByTrackingKey(data.organization, key))) {
        found = key;
      }
      n++;
    }
    // Fall back to uniqid if couldn't generate
    data.trackingKey = found || uniqid();
  }
  const exp = await ExperimentModel.create({
    ...data,
    id: uniqid("exp_"),
    dateCreated: new Date(),
    dateUpdated: new Date(),
    autoSnapshots: true,
    lastSnapshotAttempt: new Date(),
  });
  // Keep the organization's tag list in sync with any tags used here
  if (data.tags) {
    await addTags(data.organization, data.tags);
  }
  return exp;
}
// Compute per-variation stats from manually-entered totals.
// `users[i]` is the sample size of variation i; `metrics[metricId][i]`
// is that variation's total metric value. Variation 0 is the baseline.
export async function getManualSnapshotData(
  experiment: ExperimentInterface,
  phaseIndex: number,
  users: number[],
  metrics: { [key: string]: number[] }
) {
  // Default variation values, override from SQL results if available
  const variations: SnapshotVariation[] = experiment.variations.map((v, i) => ({
    users: users[i],
    metrics: {},
  }))
  const phase = experiment.phases[phaseIndex];
  // Index all of the org's metrics by id for quick lookup
  const metricMap = new Map<string, MetricInterface>();
  const allMetrics = await getMetricsByOrganization(experiment.organization);
  allMetrics.forEach((m) => {
    metricMap.set(m.id, m);
  });
  Object.keys(metrics).forEach((m) => {
    const metric = metricMap.get(m);
    experiment.variations.forEach((v, i) => {
      // Baseline
      if (!i) {
        variations[i].metrics[m] = getValueCR(
          metric,
          metrics[m][i],
          users[i],
          users[i]
        );
      }
      // Variation
      else {
        const type = metric.type;
        let stats: ABTestStats;
        if (type === "binomial") {
          // Successes vs failures for baseline and this variation
          stats = binomialABTest(
            metrics[m][0],
            users[0] - metrics[m][0],
            metrics[m][i],
            users[i] - metrics[m][i]
          );
        } else if (type === "count") {
          stats = countABTest(metrics[m][0], users[0], metrics[m][i], users[i]);
        } else if (type === "duration") {
          // NOTE(review): stddev is set equal to the mean here (only
          // totals are available for manual entry) — confirm this
          // approximation is intended
          stats = bootstrapABTest(
            {
              mean: metrics[m][0] / users[0],
              count: users[0],
              stddev: metrics[m][0] / users[0],
            },
            users[0],
            {
              mean: metrics[m][i] / users[i],
              count: users[i],
              stddev: metrics[m][i] / users[i],
            },
            users[i],
            metric?.ignoreNulls || false
          );
        } else {
          throw new Error("Metric type not supported: " + type);
        }
        // TODO: support other metric types
        // For inverse metrics, flip the win probability
        if (metric.inverse) {
          stats.chanceToWin = 1 - stats.chanceToWin;
        }
        variations[i].metrics[m] = {
          ...getValueCR(metric, metrics[m][i], users[i], users[i]),
          ...stats,
        };
      }
    });
  });
  // Check to see if the observed number of samples per variation matches what we expect
  // This returns a p-value and a small value indicates the results are untrustworthy
  const sampleRatioMismatch = srm(
    variations.map((v) => v.users),
    phase.variationWeights
  );
  return {
    srm: sampleRatioMismatch,
    variations,
  };
}
// Compute stats from manually-entered totals and persist them as a
// snapshot document for the given experiment phase.
export async function createManualSnapshot(
  experiment: ExperimentInterface,
  phaseIndex: number,
  users: number[],
  metrics: { [key: string]: number[] }
) {
  const { srm, variations } = await getManualSnapshotData(
    experiment,
    phaseIndex,
    users,
    metrics
  );
  // Manual snapshots always have a single "All" result (no dimensions)
  const data: ExperimentSnapshotInterface = {
    id: uniqid("snp_"),
    experiment: experiment.id,
    phase: phaseIndex,
    dateCreated: new Date(),
    manual: true,
    results: [{ name: "All", srm, variations }],
  };
  return await ExperimentSnapshotModel.create(data);
}
export async function createSnapshot(
experiment: ExperimentInterface,
phaseIndex: number,
datasource: DataSourceInterface,
dimension?: DimensionInterface
) {
const metrics = await getMetricsByOrganization(experiment.organization);
const metricMap = new Map<string, MetricInterface>();
metrics.forEach((m) => {
metricMap.set(m.id, m);
});
const activationMetric = metricMap.get(experiment.activationMetric) || null;
// Only include metrics tied to this experiment
const selectedMetrics = experiment.metrics
.map((m) => metricMap.get(m))
.filter((m) => m);
if (!selectedMetrics.length) {
throw new Error("Experiment must have at least 1 metric selected.");
}
const phase = experiment.phases[phaseIndex];
// Update lastSnapshotAttempt
experiment.lastSnapshotAttempt = new Date();
await ExperimentModel.updateOne(
{
id: experiment.id,
},
{
$set: {
lastSnapshotAttempt: experiment.lastSnapshotAttempt,
},
}
);
// Generate and run the SQL for test results
const integration = getSourceIntegrationObject(datasource);
const { results: rows, query } = await integration.getExperimentResults(
experiment,
phase,
selectedMetrics,
activationMetric,
dimension
);
const results: {
name: string;
srm: number;
variations: SnapshotVariation[];
}[] = [];
rows.forEach((d) => {
// Default variation values, override from SQL results if available
const variations: SnapshotVariation[] = experiment.variations.map(() => ({
users: 0,
metrics: {},
}));
const metricData = new Map<
string,
{ count: number; mean: number; stddev: number }[]
>();
d.variations.forEach((row) => {
const variation = row.variation;
if (!variations[variation]) {
return;
}
variations[variation].users = row.users || 0;
row.metrics.forEach((m) => {
const doc = metricData.get(m.metric) || [];
doc[variation] = {
count: m.count,
mean: m.mean,
stddev: m.stddev,
};
metricData.set(m.metric, doc);
});
});
metricData.forEach((v, k) => {
const baselineSuccess = v[0]?.count * v[0]?.mean || 0;
v.forEach((data, i) => {
const success = data.count * data.mean;
const metric = metricMap.get(k);
const type = metric?.type || "binomial";
const ignoreNulls = metric?.ignoreNulls || false;
const value = success;
// Don't do stats for the baseline or when breaking down by dimension
// We aren't doing a correction for multiple tests, so the numbers would be misleading for the break down
// Can enable this later when we have a more robust stats engine
if (!i || dimension) {
variations[i].metrics[k] = getValueCR(
metric,
value,
data.count,
variations[i].users
);
return;
}
let stats: ABTestStats;
// Short cut if either the baseline or variation has no data
if (!baselineSuccess || !success) {
stats = {
buckets: [],
chanceToWin: 0,
ci: [0, 0],
expected: 0,
};
} else if (type === "binomial") {
stats = binomialABTest(
baselineSuccess,
variations[0].users - baselineSuccess,
success,
variations[i].users - success
);
} else if (type === "count") {
stats = countABTest(
baselineSuccess,
variations[0].users,
success,
variations[i].users
);
} else if (type === "duration") {
stats = bootstrapABTest(
v[0],
variations[0].users,
data,
variations[i].users,
ignoreNulls
);
} else if (type === "revenue") {
stats = bootstrapABTest(
v[0],
variations[0].users,
data,
variations[i].users,
ignoreNulls
);
} else {
throw new Error("Metric type not supported: " + type);
}
if (metric.inverse) {
stats.chanceToWin = 1 - stats.chanceToWin;
}
variations[i].metrics[k] = {
...getValueCR(metric, value, data.count, variations[i].users),
...stats,
};
});
});
// Check to see if the observed number of samples per variation matches what we expect
// This returns a p-value and a small value indicates the results are untrustworthy
const sampleRatioMismatch = srm(
variations.map((v) => v.users),
phase.variationWeights
);
results.push({
name: d.dimension,
srm: sampleRatioMismatch,
variations,
});
});
const data: ExperimentSnapshotInterface = {
id: uniqid("snp_"),
experiment: experiment.id,
dateCreated: new Date(),
phase: phaseIndex,
manual: false,
query,
queryLanguage: integration.getSourceProperties().queryLanguage,
dimension: dimension?.id || null,
results,
};
const snapshot = await ExperimentSnapshotModel.create(data);
// After successful snapshot, turn on autosnapshots
experiment.autoSnapshots = true;
await ExperimentModel.updateOne(
{
id: experiment.id,
},
{
$set: {
autoSnapshots: true,
},
}
);
return snapshot;
}
/**
 * Subscribe a user to change notifications for an experiment.
 * Upserts the user's watch document for this organization and adds the
 * experiment id to its `experiments` set (no-op if already watched).
 */
export async function ensureWatching(
  userId: string,
  orgId: string,
  experiment: string
) {
  const filter = {
    userId,
    organization: orgId,
  };
  const update = {
    $addToSet: {
      experiments: experiment,
    },
  };
  await WatchModel.updateOne(filter, update, { upsert: true });
}
/**
 * Return all watch documents for users watching the given experiment.
 *
 * Bug fix: watched experiment ids are stored in the `experiments` array
 * (see ensureWatching's $addToSet), so the query must match on
 * `experiments`, not a non-existent singular `experiment` field -- the
 * old query could never return any watchers.
 */
export async function getExperimentWatchers(experimentId: string) {
  const watchers = await WatchModel.find({
    experiments: experimentId,
  });
  return watchers;
}
/**
 * Combine raw past-experiment query rows (one row per
 * experiment/variation) into one PastExperiment per tracking key, with
 * traffic weights snapped to common human-chosen percentages.
 *
 * @param data Query result map; reads the "experiments" entry
 * @returns Experiments with between 2 and 9 variations
 */
export async function processPastExperiments(
  data: QueryMap
): Promise<PastExperiment[]> {
  const experiments =
    (data.get("experiments")?.result as PastExperimentResult)?.experiments ||
    [];

  // Group rows by experiment id, accumulating variations and user counts
  const experimentMap = new Map<string, PastExperiment>();
  experiments.forEach((e) => {
    let el = experimentMap.get(e.experiment_id);
    if (!el) {
      el = {
        endDate: e.end_date,
        startDate: e.start_date,
        numVariations: 1,
        variationKeys: [e.variation_id],
        trackingKey: e.experiment_id,
        users: e.users,
        weights: [e.users],
      };
      experimentMap.set(e.experiment_id, el);
    } else {
      // Widen the date range to cover every variation's data
      if (e.start_date < el.startDate) {
        el.startDate = e.start_date;
      }
      if (e.end_date > el.endDate) {
        el.endDate = e.end_date;
      }
      el.variationKeys.push(e.variation_id);
      el.weights.push(e.users);
      el.users += e.users;
      el.numVariations++;
    }
  });

  // Percentages humans typically pick for traffic splits; observed
  // splits are snapped to the closest one to smooth out sampling noise.
  const possibleWeights = [
    5, 10, 16, 20, 25, 30, 33, 40, 50, 60, 67, 70, 75, 80, 90, 95,
  ];
  experimentMap.forEach((exp) => {
    const totalWeight = exp.weights.reduce((sum, weight) => sum + weight, 0);
    exp.weights = exp.weights.map((w) => {
      // Map the observed percentage traffic to the closest reasonable number
      const p = Math.round((w / totalWeight) * 100);
      return possibleWeights
        .map((x) => [x, Math.abs(x - p)])
        .sort((a, b) => a[1] - b[1])[0][0];
    });

    // Snapping can make the total drift off 100 in either direction;
    // adjust the control weight so the weights always sum to exactly 1.
    // (The old code only corrected totals below 100, so snapped weights
    // could sum to more than 1.)
    const newTotalWeight = exp.weights.reduce((sum, weight) => sum + weight, 0);
    if (newTotalWeight !== 100) {
      exp.weights[0] += 100 - newTotalWeight;
    }
    exp.weights = exp.weights.map((w) => w / 100);
  });

  // Filter out experiments with too few or too many variations
  return Array.from(experimentMap.values()).filter(
    (e) => e.numVariations > 1 && e.numVariations < 10
  );
}

View File

@@ -0,0 +1,39 @@
import uniqid from "uniqid";
import AWS from "aws-sdk";
import { S3_BUCKET, S3_DOMAIN, S3_REGION } from "../util/secrets";
AWS.config.update({ region: S3_REGION });
const s3 = new AWS.S3();
/**
 * Generate a presigned S3 PUT url for an image upload.
 *
 * @param ext File extension (determines the Content-Type)
 * @param pathPrefix Key prefix within the bucket (e.g. "org_123/")
 * @returns The presigned uploadURL plus the public fileURL to store
 * @throws When the extension is not an accepted image type
 */
export async function getFileUploadURL(ext: string, pathPrefix: string) {
  const mimetypes: { [key: string]: string } = {
    png: "image/png",
    jpg: "image/jpeg",
    jpeg: "image/jpeg",
    gif: "image/gif",
    // Correct MIME type for SVG is image/svg+xml (was "text/svg")
    svg: "image/svg+xml",
  };
  if (!mimetypes[ext]) {
    throw new Error(
      `Invalid image file type. Only ${Object.keys(mimetypes).join(
        ", "
      )} accepted.`
    );
  }

  // Random object name; previously the generated filename was never
  // interpolated into the key, so uploads did not get unique keys.
  const filename = uniqid("img_");
  const s3Params = {
    Bucket: S3_BUCKET,
    Key: `${pathPrefix}${filename}.${ext}`,
    ContentType: mimetypes[ext],
    ACL: "public-read",
  };
  const uploadURL = s3.getSignedUrl("putObject", s3Params);
  return {
    uploadURL,
    fileURL: S3_DOMAIN + "/" + s3Params.Key,
  };
}

View File

@@ -0,0 +1,58 @@
import { IdeaModel } from "../models/IdeasModel";
import uniqid from "uniqid";
import { addTags } from "./tag";
import { IdeaInterface } from "../../types/idea";
//import {query} from "../config/postgres";
// Fetch all ideas that belong to a single organization.
export function getIdeasByOrganization(organization: string) {
  const filter = { organization };
  return IdeaModel.find(filter);
}
/**
 * Create a new idea document.
 * Caller-supplied fields are spread first so the server-generated id and
 * timestamps can never be overridden. Any tags on the idea are also
 * registered in the organization's tag list.
 */
export async function createIdea(data: Partial<IdeaInterface>) {
  const doc = await IdeaModel.create({
    ...data,
    id: uniqid("idea_"),
    dateCreated: new Date(),
    dateUpdated: new Date(),
  });
  if (doc.tags) {
    await addTags(doc.organization, doc.tags);
  }
  return doc;
}
// Look up a single idea by its public id.
export function getIdeaById(id: string) {
  return IdeaModel.findOne({ id });
}
// Fetch every idea whose id appears in the given list.
export function getIdeasByIds(ids: string[]) {
  return IdeaModel.find({ id: { $in: ids } });
}
/**
 * Find ideas whose evidence references any of the given experiment ids.
 * Evidence entries are stored as { experimentId } objects, so the ids
 * are mapped into that shape for the $in match.
 */
export function getIdeasByExperimentIds(ids: string[]) {
  // Idiom fix: the old code used .map purely for its side effect;
  // a direct map builds the evidence objects in one expression.
  const evidence = ids.map((id) => ({ experimentId: id }));
  return IdeaModel.find({
    evidence: { $in: evidence },
  });
}
// Remove a single idea by its public id.
export function deleteIdeaById(id: string) {
  return IdeaModel.deleteOne({ id });
}

View File

@@ -0,0 +1,58 @@
import { LearningModel } from "../models/LearningsModel";
import uniqid from "uniqid";
import { addTags } from "./tag";
import { LearningInterface } from "../../types/insight";
//import {query} from "../config/postgres";
// Fetch all learnings that belong to a single organization.
export function getLearningsByOrganization(organization: string) {
  const filter = { organization };
  return LearningModel.find(filter);
}
/**
 * Create a new learning document.
 * Caller-supplied fields are spread first so the server-generated id and
 * timestamps can never be overridden. Any tags on the learning are also
 * registered in the organization's tag list.
 */
export async function createLearning(data: Partial<LearningInterface>) {
  const doc = await LearningModel.create({
    ...data,
    id: uniqid("lrn_"),
    dateCreated: new Date(),
    dateUpdated: new Date(),
  });
  if (doc.tags) {
    await addTags(doc.organization, doc.tags);
  }
  return doc;
}
// Look up a single learning by its public id.
export function getLearningById(id: string) {
  return LearningModel.findOne({ id });
}
// Fetch every learning whose id appears in the given list.
export function getLearningsByIds(ids: string[]) {
  return LearningModel.find({ id: { $in: ids } });
}
/**
 * Find learnings whose evidence references any of the given experiment
 * ids. Evidence entries are stored as { experimentId } objects, so the
 * ids are mapped into that shape for the $in match.
 */
export function getLearningsByExperimentIds(ids: string[]) {
  // Idiom fix: the old code used .map purely for its side effect;
  // a direct map builds the evidence objects in one expression.
  const evidence = ids.map((id) => ({ experimentId: id }));
  return LearningModel.find({
    evidence: { $in: evidence },
  });
}
// Remove a single learning by its public id.
export function deleteLearningById(id: string) {
  return LearningModel.deleteOne({ id });
}

View File

@@ -0,0 +1,77 @@
import { URLSearchParams } from "url";
import fetch from "node-fetch";
import { MixpanelConnectionParams } from "../../types/integrations/mixpanel";
const encodedParams = new URLSearchParams();
// eslint-disable-next-line
type MixpanelResultRow = any;
/**
 * Re-indent generated JavaScript with 2-space indentation so the JQL
 * sent to Mixpanel (and shown to users) is readable.
 * Heuristic: a line ending with an opener ({, (, [) increases the indent
 * level; a line consisting only of closers decreases it.
 */
function indentJs(js: string) {
  let indent = 0;
  return (
    js
      // Get rid of existing indentation
      .replace(/^\s+/gm, "")
      // Split into lines
      .split(/\n/g)
      // Add new indentation
      .map((line) => {
        // Remove an indent when the line closes a block. e.g. ");" or "}))];"
        if (line.match(/^[})\]\s]+;?\s*$/)) indent--;
        line = "  ".repeat(Math.max(0, indent)) + line;
        // Add an indent when the line starts a block. e.g. "if(...) {" or "const a = ["
        if (line.match(/[{([]+\s*$/)) indent++;
        return line;
      })
      // Combine back to a single string
      .join("\n")
      // Remove excessive consecutive newlines
      .replace(/\n[ ]*\n[ ]*\n/g, "\n\n")
      // Remove leading and trailing whitespace
      .trim()
  );
}
/**
 * Wrap user-provided JQL in a `main()` function, optionally preceded by
 * a serialized `params` object and extra helper code, then re-indent the
 * whole script.
 */
export function formatQuery(
  js: string,
  params?: Record<string, unknown>,
  extraJs?: string
) {
  const paramsJs = params
    ? `var params = ${JSON.stringify(params, null, 2)};`
    : "";
  return indentJs(`
    ${paramsJs}
    ${extraJs ? extraJs : ""}
    function main() {
      ${js}
    }
  `);
}
/**
 * Execute a JQL script against the Mixpanel API and return the parsed
 * JSON result.
 *
 * Bug fix: the request body was built by mutating a single module-level
 * URLSearchParams instance, so concurrent queries could clobber each
 * other's script/project_id. Build the params locally per call instead.
 *
 * @throws When Mixpanel returns an error payload
 */
export async function runQuery<T extends MixpanelResultRow>(
  conn: MixpanelConnectionParams,
  query: string
): Promise<T> {
  const body = new URLSearchParams();
  body.set("script", query);
  body.set("project_id", conn.projectId);

  const url = "https://mixpanel.com/api/2.0/jql";
  const options = {
    method: "POST",
    headers: {
      Accept: "application/json",
      "Content-Type": "application/x-www-form-urlencoded",
      // Mixpanel uses HTTP basic auth with the service account credentials
      Authorization: `Basic ${Buffer.from(
        `${conn.username}:${conn.secret}`
      ).toString("base64")}`,
    },
    body,
  };
  const res = await fetch(url, options);
  const json = await res.json();
  if (json.error) {
    throw new Error(json.error);
  }
  return json;
}

View File

@@ -0,0 +1,214 @@
import {
OrganizationModel,
OrganizationDocument,
} from "../models/OrganizationModel";
import uniqid from "uniqid";
import { randomBytes } from "crypto";
import { APP_ORIGIN } from "../util/secrets";
import { AuthRequest } from "../types/AuthRequest";
import { UserModel } from "../models/UserModel";
import { isEmailEnabled, sendInviteEmail } from "./email";
import {
MemberRole,
OrganizationInterface,
Permissions,
} from "../../types/organization";
// Look up a single organization by its public id.
export async function getOrganizationById(id: string) {
  return OrganizationModel.findOne({ id });
}
// Return the user's role within the organization, or null if the user
// is not a member.
export function getRole(
  org: OrganizationInterface,
  userId: string
): MemberRole | null {
  const member = org.members.find((m) => m.id === userId);
  return member ? member.role : null;
}
/**
 * Expand a member role into its permission flags.
 * Roles are cumulative: admin gets everything a developer gets, and a
 * developer gets everything a designer gets -- implemented with
 * intentional switch fall-through (no `break` statements).
 */
export function getPermissionsByRole(role: MemberRole): Permissions {
  const permissions: Permissions = {};
  switch (role) {
    case "admin":
      permissions.organizationSettings = true;
    // falls through
    case "developer":
      permissions.runExperiments = true;
      permissions.createMetrics = true;
    // falls through
    case "designer":
      permissions.draftExperiments = true;
  }
  return permissions;
}
/**
 * Check whether the requesting user may access the given organization:
 * true for site admins, for the org already attached to the request, or
 * when the user appears in the org's member list.
 */
export async function userHasAccess(
  req: AuthRequest,
  organization: string
): Promise<boolean> {
  if (req.admin || req.organization?.id === organization) {
    return true;
  }
  const org = await getOrganizationById(organization);
  return !!org && org.members.some((m) => m.id === req.userId);
}
// Fetch every organization where the user appears in the members list.
export async function getAllOrganizationsByUserId(userId: string) {
  const filter = {
    members: {
      $elemMatch: { id: userId },
    },
  };
  return OrganizationModel.find(filter);
}
/**
 * Create a new organization with the given owner as its only (admin)
 * member and an empty invite list.
 */
export function createOrganization(
  email: string,
  userId: string,
  name: string,
  url: string
) {
  // TODO: sanitize fields
  return OrganizationModel.create({
    id: uniqid("org_"),
    ownerEmail: email,
    name,
    url,
    invites: [],
    members: [
      {
        id: userId,
        role: "admin",
      },
    ],
  });
}
/**
 * Remove a member from an organization and persist the change.
 *
 * Robustness fix: validate before mutating -- the old code filtered
 * `organization.members` first and then threw, leaving the in-memory
 * document with an empty member list on the error path.
 *
 * @throws When removing the member would leave the organization empty
 */
export async function removeMember(
  organization: OrganizationDocument,
  id: string
) {
  const remaining = organization.members.filter(
    (member) => member.id !== id
  );
  if (!remaining.length) {
    throw new Error("Organizations must have at least 1 member");
  }
  organization.members = remaining;
  organization.markModified("members");
  await organization.save();
  return organization;
}
// Delete a pending invite (identified by its key) and persist the change.
export async function revokeInvite(
  organization: OrganizationDocument,
  key: string
) {
  const remaining = organization.invites.filter((i) => i.key !== key);
  organization.invites = remaining;
  organization.markModified("invites");
  await organization.save();
  return organization;
}
// Build the absolute url a user visits to accept an invite.
// Keys are url-safe (base64 with non-alphanumerics stripped), so no
// extra encoding is needed here.
export function getInviteUrl(key: string) {
  return `${APP_ORIGIN}/invitation?key=${key}`;
}
/**
 * Accept a pending invite: remove it from the org's invite list and add
 * the user as a member with the invited role.
 *
 * Robustness fix: skip the member push when the user is already a
 * member, so accepting twice cannot create duplicate member entries.
 *
 * @throws When no organization has an invite with this key
 */
export async function acceptInvite(key: string, userId: string) {
  const organization = await OrganizationModel.findOne({
    "invites.key": key,
  });
  if (!organization) {
    throw new Error("Invalid key");
  }

  const invite = organization.invites.find((i) => i.key === key);

  // Remove the invite now that it has been used
  organization.invites = organization.invites.filter((i) => i.key !== key);
  organization.markModified("invites");

  // Add to member list (guard against duplicate membership)
  if (!organization.members.some((m) => m.id === userId)) {
    organization.members.push({
      id: userId,
      role: invite?.role || "admin",
    });
    organization.markModified("members");
  }

  await organization.save();
  return organization;
}
/**
 * Invite an email address to join an organization.
 * Re-uses an existing pending invite for the same email; otherwise
 * generates a random url-safe key, stores the invite, and attempts to
 * email it (best-effort).
 *
 * Fixes: single find() instead of scanning the invite list twice with
 * filter; return after reject in the randomBytes callback so resolve is
 * never reached on error.
 *
 * @returns Whether the invite email was sent plus the invite url
 */
export async function inviteUser(
  organization: OrganizationDocument,
  email: string,
  role: MemberRole = "admin"
) {
  organization.invites = organization.invites || [];

  // Already invited: return the existing invite instead of a new key
  const existing = organization.invites.find((i) => i.email === email);
  if (existing) {
    return {
      emailSent: true,
      inviteUrl: getInviteUrl(existing.key),
    };
  }

  // Generate random key for invite
  const buffer: Buffer = await new Promise((resolve, reject) => {
    randomBytes(32, function (ex, buf) {
      if (ex) {
        reject("error generating token");
        return;
      }
      resolve(buf);
    });
  });
  const key = buffer.toString("base64").replace(/[^a-zA-Z0-9]+/g, "");

  // Save invite in Mongo
  organization.invites.push({
    email,
    key,
    dateCreated: new Date(),
    role,
  });
  organization.markModified("invites");
  await organization.save();

  // Email delivery is best-effort; the caller can share the url manually
  let emailSent = false;
  if (isEmailEnabled()) {
    try {
      await sendInviteEmail(organization, key);
      emailSent = true;
    } catch (e) {
      emailSent = false;
    }
  }

  return {
    emailSent,
    inviteUrl: getInviteUrl(key),
  };
}
/**
 * Look up a user's email address by user id.
 * Returns null when no such user exists (the old code threw a TypeError
 * by dereferencing a null query result).
 */
export async function getEmailFromUserId(userId: string) {
  const u = await UserModel.findOne({ id: userId });
  return u ? u.email : null;
}

View File

@@ -0,0 +1,14 @@
import { Client } from "pg";
import { PostgresConnectionParams } from "../../types/integrations/postgres";
/**
 * Run a single parameterized query against Postgres and return the rows.
 * A fresh client is created per call.
 *
 * Leak fix: the connection is closed in a finally block, so it is
 * released even when the query throws (the old code never reached
 * client.end() on error).
 *
 * @param conn Connection parameters
 * @param sql Parameterized SQL ($1, $2, ...)
 * @param values Bound parameter values
 */
export async function runPostgresQuery<T>(
  conn: PostgresConnectionParams,
  sql: string,
  values: string[] = []
): Promise<T[]> {
  const client = new Client(conn);
  await client.connect();
  try {
    const res = await client.query(sql, values);
    return res.rows;
  } finally {
    await client.end();
  }
}

View File

@@ -0,0 +1,51 @@
import { PresentationModel } from "../models/PresentationModel";
import uniqid from "uniqid";
import { PresentationInterface } from "../../types/presentation";
//import {query} from "../config/postgres";
// Fetch all presentations that belong to a single organization.
export function getPresentationsByOrganization(organization: string) {
  const filter = { organization };
  return PresentationModel.find(filter);
}
// Look up a single presentation by its public id.
export function getPresentationById(id: string) {
  return PresentationModel.findOne({ id });
}
/**
 * Remove all references to an experiment from every presentation that
 * includes it, saving each modified presentation in parallel.
 */
export async function removeExperimentFromPresentations(experiment: string) {
  const presentations = await PresentationModel.find({
    experimentIds: experiment,
  });
  await Promise.all(
    presentations.map(async (p) => {
      p.experimentIds = p.experimentIds.filter((id) => id !== experiment);
      p.markModified("experimentIds");
      await p.save();
    })
  );
}
/**
 * Create a new presentation. Caller-supplied fields are spread first so
 * the server-generated id and timestamps can never be overridden.
 */
export async function createPresentation(data: Partial<PresentationInterface>) {
  return PresentationModel.create({
    ...data,
    id: uniqid("pres_"),
    dateCreated: new Date(),
    dateUpdated: new Date(),
  });
}
// Remove a single presentation by its public id.
export function deletePresentationById(id: string) {
  return PresentationModel.deleteOne({ id });
}

View File

@@ -0,0 +1,336 @@
import { QueryDocument, QueryModel } from "../models/QueryModel";
import {
UsersQueryParams,
MetricValueParams,
SourceIntegrationInterface,
} from "../types/Integration";
import uniqid from "uniqid";
import mongoose from "mongoose";
import {
Queries,
QueryInterface,
QueryPointer,
QueryStatus,
} from "../../types/query";
// Map from query name (e.g. "experiments", "users") to its mongo document
export type QueryMap = Map<string, QueryDocument>;
// Shape shared by documents (snapshots, reports, ...) that track a set
// of background queries plus when the run started
export type InterfaceWithQueries = {
  runStarted: Date;
  queries: Queries;
  organization: string;
};
// A mongoose document carrying the queries/runStarted fields above
export type DocumentWithQueries = mongoose.Document & InterfaceWithQueries;
/**
 * Look for a recent identical query that can be re-used instead of
 * re-running it: either one that already succeeded, or one still running
 * with a fresh heartbeat (within the last 2 minutes).
 * Returns null when nothing re-usable exists.
 */
async function getExistingQuery(
  integration: SourceIntegrationInterface,
  query: string
): Promise<QueryDocument | null> {
  const lasthour = new Date();
  lasthour.setHours(lasthour.getHours() - 1);
  const twoMinutesAgo = new Date();
  twoMinutesAgo.setMinutes(twoMinutesAgo.getMinutes() - 2);

  // Most recent matching queries from the last hour, newest first
  const candidates = await QueryModel.find({
    organization: integration.organization,
    datasource: integration.datasource,
    query,
    createdAt: { $gt: lasthour },
    status: { $in: ["running", "succeeded"] },
  })
    .sort({ createdAt: -1 })
    .limit(5);

  const reusable = candidates.find(
    (q) => q.status === "succeeded" || q.heartbeat >= twoMinutesAgo
  );
  return reusable || null;
}
/**
 * Insert a new query document in the "running" state for this
 * integration/query pair.
 */
async function createNewQuery(
  integration: SourceIntegrationInterface,
  query: string
): Promise<QueryDocument> {
  const data: QueryInterface = {
    id: uniqid("qry_"),
    organization: integration.organization,
    datasource: integration.datasource,
    language: integration.getSourceProperties().queryLanguage,
    query,
    createdAt: new Date(),
    startedAt: new Date(),
    finishedAt: null,
    heartbeat: new Date(),
    status: "running",
    result: null,
    error: null,
  };
  return await QueryModel.create(data);
}
/**
 * Track a long-running background query promise in mongo.
 * Updates a heartbeat every 30 seconds so orphaned queries (where the
 * process died) can be detected, then records success or failure.
 *
 * Bug fix: the original attached .then() and .catch() to the promise
 * separately, so on rejection the .then()-derived promise was left
 * unhandled (an unhandledRejection in Node). Chain them instead.
 */
function runBackgroundQuery<T>(run: Promise<T>, doc: QueryDocument) {
  // Update heartbeat for the query once every 30 seconds
  // This lets us detect orphaned queries where the thread died
  const timer = setInterval(() => {
    doc.set("heartbeat", new Date());
    doc.save();
  }, 30000);

  run
    .then((res) => {
      clearInterval(timer);
      doc.set("finishedAt", new Date());
      doc.set("status", "succeeded");
      doc.set("result", res);
      doc.save();
    })
    .catch((e) => {
      clearInterval(timer);
      doc.set("finishedAt", new Date());
      doc.set("status", "failed");
      doc.set("error", e.message);
      doc.save();
    });
}
/**
 * Get a query document for this integration/query pair: re-use a recent
 * identical one when possible, otherwise create a new document and kick
 * off the query in the background.
 */
async function getQueryDoc<T>(
  integration: SourceIntegrationInterface,
  query: string,
  run: (query: string) => Promise<T>
): Promise<QueryDocument> {
  const existing = await getExistingQuery(integration, query);
  if (existing) {
    return existing;
  }
  const doc = await createNewQuery(integration, query);
  runBackgroundQuery<T>(run(query), doc);
  return doc;
}
// Kick off (or re-use) the query that discovers past experiments since `from`.
export async function getPastExperiments(
  integration: SourceIntegrationInterface,
  from: Date
): Promise<QueryDocument> {
  const sql = integration.getPastExperimentQuery(from);
  return getQueryDoc(integration, sql, (q: string) =>
    integration.runPastExperimentQuery(q)
  );
}
// Kick off (or re-use) the user-count query for the given params.
export async function getUsers(
  integration: SourceIntegrationInterface,
  params: UsersQueryParams
): Promise<QueryDocument> {
  const sql = integration.getUsersQuery(params);
  return getQueryDoc(integration, sql, (q: string) =>
    integration.runUsersQuery(q)
  );
}
// Kick off (or re-use) the metric value query for the given params.
export async function getMetricValue(
  integration: SourceIntegrationInterface,
  params: MetricValueParams
): Promise<QueryDocument> {
  const sql = integration.getMetricValueQuery(params);
  return getQueryDoc(integration, sql, (q: string) =>
    integration.runMetricValueQuery(q)
  );
}
/**
 * Fetch the query documents for a set of query pointers and return them
 * keyed by pointer name. Pass an existing map to merge into it.
 */
export async function getQueryData(
  queries: Queries,
  organization: string,
  map?: QueryMap
): Promise<QueryMap> {
  const docs = await QueryModel.find({
    organization,
    id: { $in: queries.map((q) => q.query) },
  });

  const res: QueryMap = map || new Map();
  docs.forEach((doc) => {
    const match = queries.find((q) => q.query === doc.id);
    if (match) {
      res.set(match.name, doc);
    }
  });
  return res;
}
/**
 * Refresh the statuses of a document's queries and report the overall
 * run status ("failed" | "running" | "succeeded").
 *
 * Bug fixes vs the original:
 * - A running query whose heartbeat is stale (>150s) is marked "failed";
 *   the old code detected staleness but then assigned the raw database
 *   status, so the failure was silently lost.
 * - onUpdate is awaited so persistence errors aren't dropped as a
 *   floating promise.
 *
 * @param queries Query pointers stored on the parent document (mutated)
 * @param onUpdate Called when statuses changed but the run is unfinished
 * @param onSuccess Called once when every query has finished
 */
export async function updateQueryStatuses(
  queries: Queries,
  organization: string,
  onUpdate: (queries: Queries) => Promise<void>,
  onSuccess: (queries: Queries, data: QueryMap) => Promise<void>
): Promise<QueryStatus> {
  // Group queries by status
  const byStatus: Record<QueryStatus, QueryPointer[]> = {
    failed: [],
    running: [],
    succeeded: [],
  };
  queries.forEach((q) => {
    byStatus[q.status].push(q);
  });

  // If there's at least 1 failed query, the overall status is failed
  if (byStatus.failed.length > 0) {
    return "failed";
  }
  // If all of the queries are successful already, the overall status is success
  if (byStatus.running.length === 0) {
    return "succeeded";
  }

  // Some queries are still running, fetch the latest statuses
  const queryMap = await getQueryData(byStatus.running, organization);
  let needsUpdate = false;
  byStatus.running.forEach((q) => {
    const latest = queryMap.get(q.name);
    if (!latest) {
      return;
    }
    let status = latest.status;
    // Treat a query whose heartbeat is >150s old as orphaned/failed
    if (
      status === "running" &&
      Date.now() - latest.heartbeat.getTime() > 150000
    ) {
      status = "failed";
    }
    if (status !== q.status) {
      needsUpdate = true;
      q.status = status;
    }
  });

  // If all of the queries are finished now for the first time
  if (
    byStatus.running.filter((q) => q.status === "succeeded").length ===
    byStatus.running.length
  ) {
    // Add results from the already successful queries
    await getQueryData(byStatus.succeeded, organization, queryMap);
    await onSuccess(queries, queryMap);
    return "succeeded";
  }

  // If the queries are still running, but the status needs to get updated
  if (needsUpdate) {
    await onUpdate(queries);
  }
  return "running";
}
/**
 * Await the creation of a set of named query documents and assemble the
 * query pointers to store on the parent document. When every query has
 * already succeeded (all re-used from cache), process results
 * immediately and include them in the return value.
 */
export async function startRun<T>(
  docs: { [key: string]: Promise<QueryDocument> },
  processResults: (data: QueryMap) => Promise<T>
): Promise<{
  queries: Queries;
  result?: T;
}> {
  const queryData: QueryMap = new Map();
  const queries: Queries = await Promise.all(
    Object.keys(docs).map(async (name) => {
      const doc = await docs[name];
      queryData.set(name, doc);
      return {
        name,
        query: doc.id,
        status: doc.status,
      };
    })
  );

  const allDone = queries.every((q) => q.status === "succeeded");
  const result = allDone ? await processResults(queryData) : undefined;

  return {
    queries,
    result,
  };
}
/**
 * Cancel a document's background run by clearing its query list.
 * No-op when nothing is currently running.
 *
 * @throws When the document is missing or owned by another organization
 */
export async function cancelRun<T extends DocumentWithQueries>(
  doc: T,
  organization: string
) {
  if (!doc) {
    throw new Error("Could not find document");
  }
  if (doc.organization !== organization) {
    throw new Error("You do not have access to this document");
  }

  const hasRunning = doc.queries.some((q) => q.status === "running");
  if (hasRunning) {
    doc.set("queries", []);
    doc.set("runStarted", null);
    await doc.save();
  }
  return {
    status: 200,
  };
}
/**
 * Build the API response for polling a document's background-query run:
 * refreshes query statuses, persists changes, and reports progress.
 *
 * Bug fix: `elapsed` previously evaluated to NaN when runStarted was
 * unset (Date.now() - undefined); it now defaults to 0.
 *
 * @throws When the document is missing or owned by another organization
 */
export async function getStatusEndpoint<T extends DocumentWithQueries, R>(
  doc: T,
  organization: string,
  resultsKey: string,
  processResults: (data: QueryMap) => Promise<R>
) {
  if (!doc) {
    throw new Error("Could not find document");
  }
  if (doc.organization !== organization) {
    throw new Error("You do not have access to this document");
  }

  const status = await updateQueryStatuses(
    doc.queries,
    organization,
    // Persist updated statuses while the run is still in progress
    async (queries: Queries) => {
      doc.set("queries", queries);
      await doc.save();
    },
    // On completion, process results and store them under resultsKey
    async (queries: Queries, data: QueryMap) => {
      doc.set("queries", queries);
      const results = await processResults(data);
      doc.set(resultsKey, results);
      await doc.save();
    }
  );

  const elapsed = doc.runStarted
    ? Math.floor((Date.now() - doc.runStarted.getTime()) / 1000)
    : 0;

  return {
    status: 200,
    queryStatus: status,
    elapsed,
    finished: doc.queries.filter((q) => q.status === "succeeded").length,
    total: doc.queries.length,
  };
}

View File

@@ -0,0 +1,68 @@
/*
import {ReportModel, Query, QueryResult} from "../models/ReportModel";
import {cacheKey, cacheGet, cacheSet} from "./redis";
import {query, getDataSourceById} from "../services/datasource";
import uniqid from "uniqid";
export function getAllReportsByOrganization(organization: string) {
return ReportModel.find({
organization
});
}
export function createReport(organization: string) {
return ReportModel.create({
id: uniqid("rep_"),
organization,
title: "New Report",
description: "",
queries: [{
query: "-- Put SQL query here...\n",
showTable: true,
source: "",
visualizations: [],
}],
dateCreated: new Date(),
dateUpdated: new Date()
});
}
export function getReportById(id: string) {
return ReportModel.findOne({
id
});
}
export async function runQuery(id: string, q: Query, useCache: boolean = true): Promise<QueryResult> {
const key = cacheKey(id, q.datasource, q.query);
if (useCache) {
const cached = await cacheGet<QueryResult>(key);
if (cached) {
return cached;
}
}
const datasource = await getDataSourceById(q.datasource);
// TODO: use postgres client specific to this source (with properly scoped permissions)
const rows = await query<{[key: string]: string}>(datasource, q.query);
const result = {
timestamp: new Date(),
rows
};
await cacheSet(key, 3600, result);
return result;
}
export async function runReport(id: string, useCache: boolean = true) {
const report = await getReportById(id);
const resultPromises = report.queries.map(async (q) => runQuery(id, q, useCache));
// TODO: use allSettled so we can get back partial results if a subset of queries fail
const results = await Promise.all(resultPromises);
return results;
}
*/

View File

@@ -0,0 +1,100 @@
import { ServerResponse } from "http";
import { IncomingMessage } from "http";
import crypto from "crypto";
import { SLACK_SIGNING_SECRET } from "../util/secrets";
import { WebClient } from "@slack/web-api";
import {
OrganizationDocument,
OrganizationModel,
} from "../models/OrganizationModel";
import { UserModel } from "../models/UserModel";
// Initialize a single instance for the whole app
const web = new WebClient();
/**
 * Verify that an incoming request really came from Slack.
 * Checks the request timestamp (replay protection) and compares an
 * HMAC-SHA256 of the raw body against the x-slack-signature header.
 *
 * Fixes vs the original:
 * - Removed console.log of all request headers (leaked signature
 *   material into logs).
 * - Validates the signature header exists and is a single string.
 * - Compares lengths before crypto.timingSafeEqual, which throws when
 *   the buffers differ in length.
 *
 * @throws When any verification check fails
 */
export function verifySlackRequestSignature(
  req: IncomingMessage,
  res: ServerResponse,
  buf: Buffer
) {
  const rawTimestamp = req.headers["x-slack-request-timestamp"];
  if (!rawTimestamp || typeof rawTimestamp !== "string") {
    throw new Error("Missing or Invalid timestamp");
  }
  // Verify request happened recently to protect against replay attacks
  const timestamp = parseInt(rawTimestamp);
  if (Math.abs(timestamp - Date.now() / 1000) > 60 * 5) {
    throw new Error("Invalid timestamp");
  }

  const slackSignature = req.headers["x-slack-signature"];
  if (!slackSignature || typeof slackSignature !== "string") {
    throw new Error("Missing or Invalid signature");
  }

  // Hash the request and compare with the signature header to verify
  const str = "v0:" + timestamp + ":" + buf.toString();
  const sig =
    "v0=" +
    crypto.createHmac("sha256", SLACK_SIGNING_SECRET).update(str).digest("hex");
  const expected = Buffer.from(sig);
  const actual = Buffer.from(slackSignature);
  if (
    expected.length !== actual.length ||
    !crypto.timingSafeEqual(expected, actual)
  ) {
    throw new Error("Signatures do not match");
  }
}
/**
 * Resolve a Slack user id to an internal user id and display name.
 * Fetches the Slack profile using the organization's Slack connection
 * token, then matches the profile email against our user database.
 * Best-effort: returns { id: null, name: null } on any failure.
 */
export async function getUserInfoBySlackId(
  slackUserId: string,
  organization: OrganizationDocument
): Promise<{ id: null | string; name: null | string }> {
  try {
    const res = await web.users.info({
      token: organization?.connections?.slack?.token,
      user: slackUserId,
    });
    // Narrow the loosely-typed Slack API response to the fields we read
    const slackUser = res.user as {
      real_name?: string;
      profile?: { email?: string };
    };
    const email = slackUser?.profile?.email;
    let id: string | null = null;
    if (email) {
      const user = await UserModel.findOne({ email });
      if (user) {
        // Make sure user is part of the organization
        if (organization.members.map((m) => m.id).includes(user.id)) {
          id = user.id;
        }
      }
    }
    // Can't find matching user in our database, just use the full name instead
    return {
      id,
      name: slackUser.real_name || null,
    };
  } catch (e) {
    // All errors (bad token, network, unknown user) are swallowed so
    // Slack replies degrade gracefully instead of failing
    return {
      id: null,
      name: null,
    };
  }
}
export async function getOrganizationFromSlackTeam(
teamId: string
): Promise<OrganizationDocument> {
const organization = await OrganizationModel.findOne({
"connections.slack.team": teamId,
});
if (!organization) {
throw new Error("Unknown slack team id");
}
return organization;
}
// Wrap markdown in the response shape Slack expects for text replies.
export function formatTextResponse(markdown: string) {
  return { text: markdown, mrkdwn: true };
}

View File

@@ -0,0 +1,34 @@
import { Snowflake } from "snowflake-promise";
import { SnowflakeConnectionParams } from "../../types/integrations/snowflake";
/**
 * Run a query against Snowflake and return rows with lowercased column
 * names (Snowflake upper-cases identifiers, so names are normalized to
 * match other data sources).
 *
 * Leak fix: the connection is now destroyed when the query finishes or
 * throws; the old code never disconnected, leaking a session per call.
 */
export async function runSnowflakeQuery<T>(
  conn: SnowflakeConnectionParams,
  sql: string,
  values: string[] = []
): Promise<T[]> {
  const snowflake = new Snowflake({
    account: conn.account,
    username: conn.username,
    password: conn.password,
    database: conn.database,
    schema: conn.schema,
    warehouse: conn.warehouse,
  });
  await snowflake.connect();
  try {
    const res = await snowflake.execute(sql, values);
    // Annoyingly, Snowflake turns all column names into all caps
    // Need to lowercase them here so they match other data sources
    return res.map((row) => {
      // eslint-disable-next-line
      const o: any = {};
      Object.keys(row).forEach((k) => {
        o[k.toLowerCase()] = row[k];
      });
      return o;
    });
  } finally {
    await snowflake.destroy();
  }
}

View File

@@ -0,0 +1,249 @@
// eslint-disable-next-line
/// <reference path="../types/jstat.d.ts" />
import { jStat } from "jstat";
import { MetricInterface } from "../../types/metric";
import { MetricStats } from "../types/Integration";
// Result of a Bayesian A/B comparison between a baseline and a variation
export interface ABTestStats {
  // Point estimate of the relative improvement (e.g. 0.1 = +10%)
  expected: number;
  // Probability (0-1) that the variation beats the baseline
  chanceToWin: number;
  // 95% credible interval of the relative improvement
  ci: [number, number];
  // Histogram of simulated relative improvements, for graphing
  buckets: {
    x: number;
    y: number;
  }[];
}
// From: https://www.evanmiller.org/bayesian-ab-testing.html
// Exact closed-form P(rateB > rateA) for binomial metrics with
// Beta(success + 1, failure + 1) posteriors (uniform prior).
// Evaluated in log space via betaln to avoid overflow.
function binomialChanceToWin(
  aSuccess: number,
  aFailure: number,
  bSuccess: number,
  bFailure: number
) {
  let total = 0;
  // Sum the first bSuccess terms of Miller's series
  for (let i = 0; i < bSuccess; i++) {
    total += Math.exp(
      jStat.betaln(aSuccess + i + 1, bFailure + aFailure + 2) -
        Math.log(bFailure + i + 1) -
        jStat.betaln(1 + i, bFailure + 1) -
        jStat.betaln(aSuccess + 1, aFailure + 1)
    );
  }
  return total;
}
// Closed-form P(rateB > rateA) for count metrics, matching the Gamma
// posteriors sampled in countABTest below (see Evan Miller's formulas
// for count data). Evaluated in log space via betaln to avoid overflow.
function countChanceToWin(
  aCount: number,
  aVisits: number,
  bCount: number,
  bVisits: number
) {
  let total = 0;
  // Sum the first bCount terms of the series
  for (let k = 0; k < bCount; k++) {
    total += Math.exp(
      k * Math.log(bVisits) +
        aCount * Math.log(aVisits) -
        (k + aCount) * Math.log(bVisits + aVisits) -
        Math.log(k + aCount) -
        jStat.betaln(k + 1, aCount)
    );
  }
  return total;
}
/**
 * Core Monte Carlo A/B comparison.
 * Draws NUM_SAMPLES from each posterior sampler, computes the relative
 * change (b - a) / a for each pair of draws, and summarizes the
 * distribution: 95% CI, chance to win, and a histogram for graphing.
 *
 * @param sampleA Draws one sample from the baseline's posterior
 * @param sampleB Draws one sample from the variation's posterior
 * @param chanceToWin Exact closed-form value when known; pass null to
 *   estimate it from the simulated samples instead
 * @param expected Point estimate of the relative improvement
 */
function abTest(
  sampleA: () => number,
  sampleB: () => number,
  chanceToWin: number | null,
  expected: number
): ABTestStats {
  const NUM_SAMPLES = 1e5;
  const HISTOGRAM_BUCKETS = 50;
  // Simulate the distributions a bunch of times to get a list of percent changes
  const change: number[] = Array(NUM_SAMPLES);
  let wins = 0;
  for (let i = 0; i < NUM_SAMPLES; i++) {
    const a = sampleA();
    const b = sampleB();
    change[i] = (b - a) / a;
    if (change[i] > 0) wins++;
  }
  // Sort ascending so percentiles can be read off by index
  change.sort((a, b) => {
    return a - b;
  });
  if (chanceToWin === null) {
    chanceToWin = wins / NUM_SAMPLES;
  }
  // CI are the array elements 2.5% and 97.5% from the start
  const ci: [number, number] = [
    change[Math.floor(change.length * 0.025)],
    change[Math.floor(change.length * 0.975)],
  ];
  // Make a histogram of the data (only include 99% of inner values to remove outliers)
  const minValue = change[Math.floor(change.length * 0.005)];
  const maxValue = change[Math.floor(change.length * 0.995)];
  const bucketSize = (maxValue - minValue) / HISTOGRAM_BUCKETS;
  const buckets: { min: number; max: number; count: number }[] = Array(
    HISTOGRAM_BUCKETS
  );
  for (let i = 0; i < HISTOGRAM_BUCKETS; i++) {
    buckets[i] = {
      min: i * bucketSize + minValue,
      max: (i + 1) * bucketSize + minValue,
      count: 0,
    };
  }
  // Fill the histogram with the percent changes
  // (relies on `change` being sorted: walk the buckets left to right)
  let currentBucket = 0;
  for (let i = 0; i < change.length; i++) {
    if (change[i] < minValue || change[i] > maxValue) {
      continue;
    }
    while (buckets[currentBucket] && change[i] > buckets[currentBucket].max) {
      currentBucket++;
    }
    if (!buckets[currentBucket]) break;
    buckets[currentBucket].count++;
  }
  return {
    ci,
    expected,
    buckets: buckets.map((bucket) => {
      // Round to 4 decimal places
      const midpoint = parseFloat(((bucket.max + bucket.min) / 2).toFixed(4));
      const value = parseFloat((bucket.count / change.length).toFixed(4));
      return {
        x: midpoint,
        y: value,
      };
    }),
    chanceToWin,
  };
}
// Point estimate of relative improvement: (rateB - rateA) / rateA
function getExpectedValue(
  a: number,
  nA: number,
  b: number,
  nB: number
): number {
  const rateA = a / nA;
  const rateB = b / nB;
  return (rateB - rateA) / rateA;
}
/**
 * Bayesian A/B test for binomial (conversion) metrics.
 * Samples from Beta(success + 1, failure + 1) posteriors (uniform
 * prior) and uses the exact closed-form chance to win.
 */
export function binomialABTest(
  aSuccess: number,
  aFailure: number,
  bSuccess: number,
  bFailure: number
) {
  return abTest(
    () => jStat.beta.sample(aSuccess + 1, aFailure + 1),
    () => jStat.beta.sample(bSuccess + 1, bFailure + 1),
    binomialChanceToWin(aSuccess, aFailure, bSuccess, bFailure),
    getExpectedValue(
      aSuccess,
      aSuccess + aFailure,
      bSuccess,
      bSuccess + bFailure
    )
  );
}
/**
 * Bayesian A/B test for count metrics.
 * Samples the per-visit rate from Gamma(count, 1/visits) posteriors and
 * uses the exact closed-form chance to win.
 */
export function countABTest(
  aCount: number,
  aVisits: number,
  bCount: number,
  bVisits: number
) {
  return abTest(
    () => jStat.gamma.sample(aCount, 1 / aVisits),
    () => jStat.gamma.sample(bCount, 1 / bVisits),
    countChanceToWin(aCount, aVisits, bCount, bVisits),
    getExpectedValue(aCount, aVisits, bCount, bVisits)
  );
}
/**
 * A/B test for continuous metrics (duration/revenue) using normal
 * approximation sampling.
 * With ignoreNulls set, compares the mean among converting users only,
 * sampling from N(mean, stddev/sqrt(count)) per the CLT. Otherwise the
 * per-visitor total is approximated by also sampling the conversion
 * count. Chance to win is estimated from the simulation (null passed to
 * abTest).
 */
export function bootstrapABTest(
  aStats: MetricStats,
  aVisits: number,
  bStats: MetricStats,
  bVisits: number,
  ignoreNulls: boolean
) {
  const getSampleFunction = (stats: MetricStats, visits: number) => {
    // Standard error (using the Central Limit Theorem)
    const se = stats.stddev / Math.sqrt(stats.count);
    if (ignoreNulls) {
      return () => jStat.normal.sample(stats.mean, se);
    }
    // Standard deviation of the conversion rate
    // (binomial stddev sqrt(n*p*(1-p)) with p = count/visits simplifies
    // to sqrt(count * (1 - count/visits)))
    const crStddev = Math.sqrt(stats.count * (1 - stats.count / visits));
    return () =>
      (jStat.normal.sample(stats.count, crStddev) / visits) *
      jStat.normal.sample(stats.mean, se);
  };
  let expected: number;
  if (ignoreNulls) {
    // Relative change of the mean among converters
    expected = (bStats.mean - aStats.mean) / aStats.mean;
  } else {
    // Relative change of the per-visitor total
    const aTotalMean = (aStats.mean * aStats.count) / aVisits;
    const bTotalMean = (bStats.mean * bStats.count) / bVisits;
    expected = (bTotalMean - aTotalMean) / aTotalMean;
  }
  return abTest(
    getSampleFunction(aStats, aVisits),
    getSampleFunction(bStats, bVisits),
    null,
    expected
  );
}
/**
 * Compute a metric's conversion rate from a raw value and user counts.
 * The denominator is converted users (`count`) when the metric ignores
 * nulls, otherwise all users.
 */
export function getValueCR(
  metric: MetricInterface,
  value: number,
  count: number,
  users: number
) {
  const denominator = metric.ignoreNulls ? count : users;
  // Guard against division by zero
  const cr = denominator > 0 ? value / denominator : 0;
  return { value, users: denominator, cr };
}
// Sample Ratio Mismatch test
// Returns the chi-squared p-value for the observed user split versus the
// expected traffic weights (small p-value = likely assignment bug).
export function srm(users: number[], weights: number[]): number {
  // Total observed users across all variations
  const total = users.reduce((sum, count) => sum + count, 0);
  if (!total) {
    return 1;
  }
  // Chi-squared statistic: sum((observed - expected)^2 / expected)
  let chisq = 0;
  users.forEach((observed, i) => {
    const expected = weights[i] * total;
    chisq += Math.pow(observed - expected, 2) / expected;
  });
  // p-value; the `|| 0` guards against NaN from the cdf
  return 1 - jStat.chisquare.cdf(chisq, users.length - 1) || 0;
}

View File

@@ -0,0 +1,42 @@
import { TagModel } from "../models/TagModel";
/**
 * Fetch all tags for an organization.
 * A single document per organization holds the full tag list; returns an
 * empty array when no document exists yet.
 */
export async function getAllTags(organization: string) {
  const doc = await TagModel.findOne({ organization });
  return doc ? doc.tags : [];
}
/**
 * Add tags to an organization's tag list, creating the document if needed.
 * Tags of length 1 or less are dropped; duplicates are prevented by $addToSet.
 */
export async function addTags(organization: string, tags: string[]) {
  const valid = tags.filter((tag) => tag.length > 1);
  if (!valid.length) return;
  await TagModel.updateOne(
    { organization },
    { $addToSet: { tags: { $each: valid } } },
    { upsert: true }
  );
}
/**
 * Add only the tags in `newTags` that are not already in `oldTags`.
 * Does nothing when `oldTags` is empty.
 * NOTE(review): the early return on empty `oldTags` also skips brand-new tag
 * lists — confirm initial creation is handled elsewhere.
 */
export async function addTagsDiff(
  organization: string,
  oldTags: string[],
  newTags: string[]
) {
  if (!oldTags.length) return;
  const added = newTags.filter((tag) => !oldTags.includes(tag));
  if (added.length) {
    // Removed leftover debug logging (console.log of the diff)
    await addTags(organization, added);
  }
}

View File

@@ -0,0 +1,89 @@
import { UserDocument, UserModel } from "../models/UserModel";
import uniqid from "uniqid";
import crypto from "crypto";
import { IS_CLOUD } from "../util/secrets";
import { promisify } from "util";
import { validatePasswordFormat } from "./auth";
// scrypt parameters: 16-byte random salt, 64-byte derived key
const SALT_LEN = 16;
const HASH_LEN = 64;
// Promisified crypto.scrypt (the Node core API is callback-based)
const scrypt = promisify(crypto.scrypt);
// Look up a single user by email address
export async function getUserByEmail(email: string) {
  return UserModel.findOne({ email });
}
// Look up a single user by internal id
export async function getUserById(id: string) {
  return UserModel.findOne({ id });
}
// Fetch every user whose id appears in the given list
export async function getUsersByIds(ids: string[]) {
  const filter = { id: { $in: ids } };
  return UserModel.find(filter);
}
/**
 * Hash a password with scrypt using a fresh random salt.
 * Returns "<salt hex>:<derived key hex>".
 */
async function hash(password: string): Promise<string> {
  // Random per-password salt prevents rainbow-table attacks
  const salt = crypto.randomBytes(SALT_LEN).toString("hex");
  const derived = await (scrypt(password, salt, HASH_LEN) as Promise<Buffer>);
  return `${salt}:${derived.toString("hex")}`;
}
/**
 * Check a candidate password against the user's stored scrypt hash.
 * The stored format is "<salt hex>:<key hex>".
 */
export async function verifyPassword(
  user: UserDocument,
  password: string
): Promise<boolean> {
  const [salt, storedHex] = user.passwordHash.split(":");
  const candidate = await (scrypt(password, salt, HASH_LEN) as Promise<Buffer>);
  // Constant-time comparison to avoid timing attacks
  return crypto.timingSafeEqual(Buffer.from(storedHex, "hex"), candidate);
}
/**
 * Validate and persist a new password for a user.
 * Throws (via validatePasswordFormat) if the password is not acceptable.
 */
export async function updatePassword(userId: string, password: string) {
  validatePasswordFormat(password);
  const passwordHash = await hash(password);
  await UserModel.updateOne({ id: userId }, { $set: { passwordHash } });
}
/**
 * Create a new user record.
 * Self-hosted deployments hash the supplied password; the cloud offering
 * stores an empty hash.
 * NOTE(review): when !IS_CLOUD, `password` is assumed to be provided —
 * presumably validatePasswordFormat rejects undefined; confirm.
 */
export async function createUser(
  name: string,
  email: string,
  password?: string
) {
  let passwordHash = "";
  if (!IS_CLOUD) {
    validatePasswordFormat(password);
    passwordHash = await hash(password);
  }
  const doc = {
    id: uniqid("u_"),
    name,
    email,
    passwordHash,
  };
  return UserModel.create(doc);
}

View File

@@ -0,0 +1,285 @@
<!DOCTYPE html>
<html lang="en" xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml"
xmlns:o="urn:schemas-microsoft-com:office:office">
<head>
<meta charset="utf-8"> <!-- utf-8 works for most cases -->
<meta name="viewport" content="width=device-width"> <!-- Forcing initial-scale shouldn't be necessary -->
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <!-- Use the latest (edge) version of IE rendering engine -->
<meta name="x-apple-disable-message-reformatting"> <!-- Disable auto-scale in iOS 10 Mail entirely -->
<meta name="format-detection" content="telephone=no,address=no,email=no,date=no,url=no">
<!-- Tell iOS not to automatically link certain text strings. -->
<meta name="color-scheme" content="light">
<meta name="supported-color-schemes" content="light">
<title></title> <!-- The title tag shows in email notifications, like Android 4.4. -->
<!-- What it does: Makes background images in 72ppi Outlook render at correct size. -->
<!--[if gte mso 9]>
<xml>
<o:OfficeDocumentSettings>
<o:AllowPNG/>
<o:PixelsPerInch>96</o:PixelsPerInch>
</o:OfficeDocumentSettings>
</xml>
<![endif]-->
<!-- Web Font / @font-face : BEGIN -->
<!-- NOTE: If web fonts are not required, lines 23 - 41 can be safely removed. -->
<!-- Desktop Outlook chokes on web font references and defaults to Times New Roman, so we force a safe fallback font. -->
<!--[if mso]>
<style>
* {
font-family: sans-serif !important;
}
</style>
<![endif]-->
<!-- All other clients get the webfont reference; some will render the font and others will silently fail to the fallbacks. More on that here: http://stylecampaign.com/blog/2015/02/webfont-support-in-email/ -->
<!--[if !mso]><!-->
<!-- insert web font reference, eg: <link href='https://fonts.googleapis.com/css?family=Roboto:400,700' rel='stylesheet' type='text/css'> -->
<!--<![endif]-->
<!-- Web Font / @font-face : END -->
<!-- CSS Reset : BEGIN -->
<style>
/* What it does: Tells the email client that only light styles are provided but the client can transform them to dark. A duplicate of meta color-scheme meta tag above. */
:root {
color-scheme: light;
supported-color-schemes: light;
}
/* What it does: Remove spaces around the email design added by some email clients. */
/* Beware: It can remove the padding / margin and add a background color to the compose a reply window. */
html,
body {
margin: 0 auto !important;
padding: 0 !important;
height: 100% !important;
width: 100% !important;
}
/* What it does: Stops email clients resizing small text. */
* {
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;
}
/* What it does: Centers email on Android 4.4 */
div[style*="margin: 16px 0"] {
margin: 0 !important;
}
/* What it does: forces Samsung Android mail clients to use the entire viewport */
#MessageViewBody,
#MessageWebViewDiv {
width: 100% !important;
}
/* What it does: Stops Outlook from adding extra spacing to tables. */
table,
td {
mso-table-lspace: 0pt !important;
mso-table-rspace: 0pt !important;
}
/* What it does: Fixes webkit padding issue. */
table {
border-spacing: 0 !important;
border-collapse: collapse !important;
table-layout: fixed !important;
margin: 0 auto !important;
}
/* What it does: Uses a better rendering method when resizing images in IE. */
img {
-ms-interpolation-mode: bicubic;
}
/* What it does: Prevents Windows 10 Mail from underlining links despite inline CSS. Styles for underlined links should be inline. */
a {
text-decoration: none;
color: #1f9adb;
}
/* What it does: A work-around for email clients meddling in triggered links. */
a[x-apple-data-detectors],
/* iOS */
.unstyle-auto-detected-links a,
.aBn {
border-bottom: 0 !important;
cursor: default !important;
color: inherit !important;
text-decoration: none !important;
font-size: inherit !important;
font-family: inherit !important;
font-weight: inherit !important;
line-height: inherit !important;
}
/* What it does: Prevents Gmail from displaying a download button on large, non-linked images. */
.a6S {
display: none !important;
opacity: 0.01 !important;
}
/* What it does: Prevents Gmail from changing the text color in conversation threads. */
.im {
color: inherit !important;
}
/* If the above doesn't work, add a .g-img class to any image in question. */
img.g-img+div {
display: none !important;
}
/* What it does: Removes right gutter in Gmail iOS app: https://github.com/TedGoas/Cerberus/issues/89 */
/* Create one of these media queries for each additional viewport size you'd like to fix */
/* iPhone 4, 4S, 5, 5S, 5C, and 5SE */
@media only screen and (min-device-width: 320px) and (max-device-width: 374px) {
u~div .email-container {
min-width: 320px !important;
}
}
/* iPhone 6, 6S, 7, 8, and X */
@media only screen and (min-device-width: 375px) and (max-device-width: 413px) {
u~div .email-container {
min-width: 375px !important;
}
}
/* iPhone 6+, 7+, and 8+ */
@media only screen and (min-device-width: 414px) {
u~div .email-container {
min-width: 414px !important;
}
}
</style>
<!-- CSS Reset : END -->
<!-- Progressive Enhancements : BEGIN -->
<style>
/* What it does: Hover styles for buttons */
.button-td,
.button-a {
transition: all 100ms ease-in;
}
.button-td-primary:hover,
.button-a-primary:hover {
background: #129CD1 !important;
border-color: #129CD1 !important;
}
/* Media Queries */
@media screen and (max-width: 600px) {
/* What it does: Adjust typography on small screens to improve readability */
.email-container p {
font-size: 17px !important;
}
}
</style>
<!-- Progressive Enhancements : END -->
</head>
<!--
The email background color (#f0faff) is defined in three places:
1. body tag: for most email clients
2. center tag: for Gmail and Inbox mobile apps and web versions of Gmail, GSuite, Inbox, Yahoo, AOL, Libero, Comcast, freenet, Mail.ru, Orange.fr
3. mso conditional: For Windows 10 Mail
-->
<body width="100%" style="margin: 0; padding: 0 !important; mso-line-height-rule: exactly; background-color: #f0faff;">
<center role="article" aria-roledescription="email" lang="en" style="width: 100%; background-color: #f0faff;">
<!--[if mso | IE]>
<table role="presentation" border="0" cellpadding="0" cellspacing="0" width="100%" style="background-color: #f0faff;">
<tr>
<td>
<![endif]-->
<!-- Visually Hidden Preheader Text : BEGIN -->
<div style="max-height:0; overflow:hidden; mso-hide:all;" aria-hidden="true">
{% block preview %}{% endblock %}
</div>
<!-- Visually Hidden Preheader Text : END -->
<!-- Create white space after the desired preview text so email clients don't pull other distracting text into the inbox preview. Extend as necessary. -->
<!-- Preview Text Spacing Hack : BEGIN -->
<div
style="display: none; font-size: 1px; line-height: 1px; max-height: 0px; max-width: 0px; opacity: 0; overflow: hidden; mso-hide: all; font-family: sans-serif;">
&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;&zwnj;&nbsp;
</div>
<!-- Preview Text Spacing Hack : END -->
<!--
Set the email width. Defined in two places:
1. max-width for all clients except Desktop Windows Outlook, allowing the email to squish on narrow but never go wider than 600px.
2. MSO tags for Desktop Windows Outlook enforce a 600px width.
-->
<div style="max-width: 600px; margin: 0 auto;" class="email-container">
<!--[if mso]>
<table align="center" role="presentation" cellspacing="0" cellpadding="0" border="0" width="600">
<tr>
<td>
<![endif]-->
<!-- Email Body : BEGIN -->
<table align="center" role="presentation" cellspacing="0" cellpadding="0" border="0" width="100%"
style="margin: auto;">
<!-- Email Header : BEGIN -->
<tr>
<td style="padding: 20px 0; text-align: center">
<img src="https://growthbook.io/logos/growthbook-logo.png" width="264" height="48" alt="Growth Book"
border="0"
style="height: auto; font-family: sans-serif; font-size: 15px; line-height: 15px; color: #555555;">
</td>
</tr>
<!-- Email Header : END -->
<!-- 1 Column Text + Button : BEGIN -->
<tr>
<td style="background-color: #ffffff;">
<table role="presentation" cellspacing="0" cellpadding="0" border="0" width="100%">
<tr>
<td style="padding: 20px; font-family: sans-serif; font-size: 15px; line-height: 20px; color: #555555;">
<h1
style="margin: 0 0 10px 0; font-family: sans-serif; font-size: 25px; line-height: 30px; color: #333333; font-weight: normal;">
{% block title %}{% endblock %}</h1>
{% block top %}{% endblock %}
</td>
</tr>
<tr>
<td style="padding: 20px; font-family: sans-serif; font-size: 15px; line-height: 20px; color: #555555;">
{% block bottom %}{% endblock %}
</td>
</tr>
</table>
</td>
</tr>
<!-- 1 Column Text + Button : END -->
</table>
<!-- Email Body : END -->
<!--[if mso]>
</td>
</tr>
</table>
<![endif]-->
</div>
<!--[if mso | IE]>
</td>
</tr>
</table>
<![endif]-->
</center>
</body>
</html>

View File

@@ -0,0 +1,24 @@
{% extends "base.jinja" %}
{% import "utils.jinja" as utils %}
{% block preview %}
Experiment {{ experimentName }} has a significant metric
{% endblock %}
{% block title %}
The experiment '{{ experimentName }}' has a significant metric
{% endblock %}
{% block top %}
<p>The following metrics for this experiment have changed:</p>
<ul style='padding: 0; margin: 0 0 10px 0; list-style-type: disc;'>
{% for change in experimentChanges %}
<li style='margin:0 0 10px 30px;'>{{ change }}</li>
{% endfor %}
</ul>
{{ utils.button("See full results", experimentUrl) }}
{% endblock %}
{% block bottom %}
<p style='margin: 30px 0;'>You are receiving this email because you are watching this experiment.</p>
{% endblock %}

View File

@@ -0,0 +1,19 @@
{% extends "base.jinja" %}
{% import "utils.jinja" as utils %}
{% block preview %}
Click here to accept the invitation
{% endblock %}
{% block title %}
<p style='text-align:center; margin: 0 0 30px;'>Join {{organizationName}} on Growth Book</p>
{% endblock %}
{% block top %}
<p style='text-align:center; margin: 0 0 30px;'>Click below to accept the invitation:</p>
{{ utils.button("Join", inviteUrl) }}
{% endblock %}
{% block bottom %}
<p style='margin: 30px 0;'>If the above button does not work, go to the url: {{inviteUrl}}</p>
{% endblock %}

View File

@@ -0,0 +1,20 @@
{% extends "base.jinja" %}
{% block preview %}
{{ email }} at {{ company }}
{% endblock %}
{% block title %}
New Organization Joined Growth Book
{% endblock %}
{% block top %}
<p>
Company: {{ company }}<br/>
Email: {{ email }}
</p>
{% endblock %}
{% block bottom %}
<p>You are receiving this email because you are an admin.</p>
{% endblock %}

View File

@@ -0,0 +1,20 @@
{% extends "base.jinja" %}
{% import "utils.jinja" as utils %}
{% block preview %}
Click here to reset your password
{% endblock %}
{% block title %}
<p style='text-align:center; margin: 0 0 30px;'>Reset Growth Book Password</p>
{% endblock %}
{% block top %}
<p style='text-align:center; margin: 0 0 30px;'>Click below to reset your password:</p>
{{ utils.button("Reset", resetUrl) }}
{% endblock %}
{% block bottom %}
<p style='margin: 30px 0;'>If the above button does not work, go to the url: {{resetUrl}}</p>
<p>The link will expire in 30 minutes</p>
{% endblock %}

View File

@@ -0,0 +1,11 @@
{% macro button(display, url) %}
<table align="center" role="presentation" cellspacing="0" cellpadding="0" border="0" style="margin: auto;">
<tr>
<td class="button-td button-td-primary" style="border-radius: 4px; background: #0991CE;">
<a class="button-a button-a-primary" href="{{ url }}"
style="background: #0991CE; border: 0px solid #000000; font-family: sans-serif; font-size: 15px; line-height: 15px; text-decoration: none; padding: 13px 17px; color: #ffffff; display: block; border-radius: 4px;">{{
display }}</a>
</td>
</tr>
</table>
{% endmacro %}

View File

@@ -0,0 +1,15 @@
import { Request } from "express";
import { OrganizationDocument } from "../models/OrganizationModel";
import { AuditInterface } from "../../types/audit";
import { Permissions } from "../../types/organization";
// Express Request augmented with the fields the auth middleware attaches.
// eslint-disable-next-line
export type AuthRequest<T = any> = Request<null, null, T> & {
  email: string;
  userId?: string;
  name?: string;
  // NOTE(review): presumably marks site-wide admins — confirm against middleware
  admin?: boolean;
  organization?: OrganizationDocument;
  permissions: Permissions;
  // Records an audit-log entry for this request
  audit: (data: Partial<AuditInterface>) => Promise<void>;
};

View File

@@ -0,0 +1,132 @@
import {
DataSourceProperties,
DataSourceSettings,
} from "../../types/datasource";
import { DimensionInterface } from "../../types/dimension";
import { ExperimentInterface, ExperimentPhase } from "../../types/experiment";
import { MetricInterface } from "../../types/metric";
import { SegmentInterface } from "../../types/segment";
// Summary statistics for one metric in one variation
export interface MetricStats {
  count: number;
  stddev: number;
  mean: number;
}
// MetricStats tagged with the id of the metric they describe
export type VariationMetricResult = MetricStats & {
  metric: string;
};
// Results for a single experiment variation
export interface VariationResult {
  variation: number;
  users: number;
  metrics: VariationMetricResult[];
}
// Variation results grouped under one dimension value
export interface DimensionResult {
  dimension: string;
  variations: VariationResult[];
}
// Full result of an experiment query, including the raw query string
export interface ExperimentResults {
  query: string;
  results: DimensionResult[];
}
// Result of estimating a metric's potential impact
export interface ImpactEstimationResult {
  query: string;
  metricTotal: number;
  users: number;
  value: number;
}
// Parameters for building a "number of users" query
export type UsersQueryParams = {
  name: string;
  userIdType: "anonymous" | "user" | "either";
  segmentQuery?: string;
  segmentName?: string;
  urlRegex?: string;
  from: Date;
  to: Date;
  conversionWindow: number;
  includeByDate?: boolean;
};
export type UsersResult = {
  users: number;
  // Per-day breakdown, present when includeByDate was requested
  dates?: {
    date: string;
    users: number;
  }[];
};
// Parameters for querying a single metric's value over a date range
export type MetricValueParams = {
  from: Date;
  to: Date;
  metric: MetricInterface;
  name?: string;
  userIdType?: "anonymous" | "user" | "either";
  segmentQuery?: string;
  segmentName?: string;
  urlRegex?: string;
  conversionWindow?: number;
  includeByDate?: boolean;
  includePercentiles?: boolean;
};
// One day's metric stats in a by-date breakdown
export type MetricValueResultDate = {
  date: string;
  count?: number;
  mean?: number;
  stddev?: number;
};
export type MetricValueResult = {
  count?: number;
  stddev?: number;
  mean?: number;
  // Present when includePercentiles was requested
  // NOTE(review): key format is not visible here — confirm with the integrations
  percentiles?: {
    [key: string]: number;
  };
  dates?: MetricValueResultDate[];
};
// Experiments discovered by scanning past data
export type PastExperimentResult = {
  experiments: {
    experiment_id: string;
    variation_id: string;
    start_date: Date;
    end_date: Date;
    users: number;
  }[];
};
// Constructor signature every data source integration must implement
export interface SourceIntegrationConstructor {
  new (
    encryptedParams: string,
    settings: DataSourceSettings
  ): SourceIntegrationInterface;
}
// Common interface implemented by all data source integrations.
// get*Query methods build query strings; run*Query methods execute them.
export interface SourceIntegrationInterface {
  datasource: string;
  organization: string;
  // eslint-disable-next-line
  getNonSensitiveParams(): any;
  getExperimentResults(
    experiment: ExperimentInterface,
    phase: ExperimentPhase,
    metrics: MetricInterface[],
    activationMetric: MetricInterface | null,
    dimension: DimensionInterface | null
  ): Promise<ExperimentResults>;
  testConnection(): Promise<boolean>;
  getSourceProperties(): DataSourceProperties;
  getImpactEstimation(
    urlRegex: string,
    metric: MetricInterface,
    segment?: SegmentInterface
  ): Promise<ImpactEstimationResult>;
  getUsersQuery(params: UsersQueryParams): string;
  getMetricValueQuery(params: MetricValueParams): string;
  runUsersQuery(query: string): Promise<UsersResult>;
  runMetricValueQuery(query: string): Promise<MetricValueResult>;
  getPastExperimentQuery(from: Date): string;
  runPastExperimentQuery(query: string): Promise<PastExperimentResult>;
}

View File

@@ -0,0 +1 @@
declare module 'jstat';

View File

@@ -0,0 +1,14 @@
import winston from "winston";
// Winston logger: console output is quiet in production, verbose elsewhere
const options: winston.LoggerOptions = {
  transports: [
    new winston.transports.Console({
      level: process.env.NODE_ENV === "production" ? "error" : "debug",
    }),
    // NOTE(review): debug.log is written in every environment — confirm this
    // is intended in production
    new winston.transports.File({ filename: "debug.log", level: "debug" }),
  ],
};
const logger = winston.createLogger(options);
export default logger;

View File

@@ -0,0 +1,64 @@
import dotenv from "dotenv";
import fs from "fs";
export const ENVIRONMENT = process.env.NODE_ENV;
const prod = ENVIRONMENT === "production";
// Outside production, configuration comes from a local dotenv file
if (!prod) {
  if (fs.existsSync(".env.local")) {
    dotenv.config({ path: ".env.local" });
  } else {
    throw new Error("Missing dev env file. Run: cp .env.example .env.local");
  }
}
// Set when running the managed cloud offering
export const IS_CLOUD = !!process.env.IS_CLOUD;
export const MONGODB_URI = process.env.MONGODB_URI;
if (!MONGODB_URI) {
  throw new Error("Missing MONGODB_URI environment variable");
}
export const APP_ORIGIN = process.env.APP_ORIGIN || "http://localhost:3000";
// Optional case-insensitive regex of additional allowed CORS origins
const corsOriginRegex = process.env.CORS_ORIGIN_REGEX;
export const CORS_ORIGIN_REGEX = corsOriginRegex
  ? new RegExp(corsOriginRegex, "i")
  : null;
export const GOOGLE_OAUTH_CLIENT_ID = process.env.GOOGLE_OAUTH_CLIENT_ID || "";
export const GOOGLE_OAUTH_CLIENT_SECRET =
  process.env.GOOGLE_OAUTH_CLIENT_SECRET || "";
export const S3_BUCKET = process.env.S3_BUCKET || "";
export const S3_REGION = process.env.S3_REGION || "us-east-1";
export const S3_DOMAIN =
  process.env.S3_DOMAIN || `https://${S3_BUCKET}.s3.amazonaws.com/`;
// Insecure "dev" defaults are only permitted outside production
export const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY || "dev";
if (prod && ENCRYPTION_KEY === "dev") {
  throw new Error("Must specify ENCRYPTION_KEY environment variable");
}
export const JWT_SECRET = process.env.JWT_SECRET || "dev";
if (prod && !IS_CLOUD && JWT_SECRET === "dev") {
  throw new Error("Must specify JWT_SECRET environment variable");
}
export const EMAIL_ENABLED = process.env.EMAIL_ENABLED === "true";
export const EMAIL_HOST = process.env.EMAIL_HOST;
// Fix: explicit radix and a defined string argument for parseInt
// (previously parseInt(process.env.EMAIL_PORT) with no radix and a
// possibly-undefined argument); falls back to 587 when unset/invalid
export const EMAIL_PORT = parseInt(process.env.EMAIL_PORT || "", 10) || 587;
export const EMAIL_HOST_USER = process.env.EMAIL_HOST_USER;
export const EMAIL_HOST_PASSWORD = process.env.EMAIL_HOST_PASSWORD;
export const EMAIL_USE_TLS = !!process.env.EMAIL_USE_TLS;
export const EMAIL_FROM = process.env.EMAIL_FROM;
export const SITE_MANAGER_EMAIL = process.env.SITE_MANAGER_EMAIL;
export const STRIPE_SECRET = process.env.STRIPE_SECRET;
export const STRIPE_PRICE = process.env.STRIPE_PRICE;
export const STRIPE_WEBHOOK_SECRET = process.env.STRIPE_WEBHOOK_SECRET;
export const STRIPE_DEFAULT_COUPON = process.env.STRIPE_DEFAULT_COUPON;
export const SLACK_SIGNING_SECRET = process.env.SLACK_SIGNING_SECRET || "";
// Connection settings (JSON) used only by integration tests
const testConn = process.env.POSTGRES_TEST_CONN;
export const POSTGRES_TEST_CONN = testConn ? JSON.parse(testConn) : {};

View File

@@ -0,0 +1,12 @@
import request from "supertest";
import app from "../src/app";
// Requests without credentials should be rejected, even for unknown routes
describe("GET /random-url", () => {
  it("should return 401", (done) => {
    request(app).get("/random-url").expect(401, done);
  });
});
describe("POST /track", () => {
  // TODO: test valid and invalid track calls
});

View File

@@ -0,0 +1,312 @@
/* eslint-disable @typescript-eslint/no-use-before-define */
/* eslint-disable @typescript-eslint/no-var-requires */
const ObjectToCsv = require("objects-to-csv");
const fs = require("fs");
// CLI: first argument overrides the number of users to generate
const args = process.argv.slice(2);
const outputDirectory = "./dummy/";
fs.mkdirSync(outputDirectory, { recursive: true });
// input data: page paths and experiment definitions
const p = require("./data.json");
const numUsers = args[0] ? parseInt(args[0]) : 10000;
// Simulation probabilities (checked per page view / per user)
const newPageChance = 0.45;
const newSessionChance = 0.3;
const signupChance = 0.2;
const signupConversionChance = 0.5;
const purchaseRequiresReg = true;
const purchaseChance = 0.1; // if you have to be registered as well, so the actual odds of this are much lower.
const purchaseAmount = [10, 100]; // min price, max price.
const experimentChance = 0.8; // for each user, the percent chance they'll be in an experiment.
const startUserId = 1;
const startSessionId = 1;
// time will be divided roughly evenly between these dates.
const d = new Date();
d.setMonth(d.getMonth() - 2);
const startDate = d.toDateString();
const endDate = new Date().toDateString();
// max time intervals between events, in seconds (randomized):
const smallTimeEvent = 20; // twenty seconds
const largeTimeEvent = 24 * 60 * 60; // sessions, 1 day
const genders = ["male", "female", ""];
// ---
// Output tables, sorted and written as CSVs at the end of the script
const users = [];
const pages = [];
const experiment_viewed = [];
const viewed_signup = [];
const signup = [];
const sessions = [];
const purchase = [];
const dateRange = Date.parse(endDate) - Date.parse(startDate);
// Average clock advance between consecutive users
const incrementAvg = dateRange / numUsers;
let currentTime = Date.parse(startDate);
let sessionId = startSessionId;
// Core pages repeated so they are weighted higher than blog pages
const allPages = [
  ...p.pages.core,
  ...p.pages.blog,
  ...p.pages.core,
  ...p.pages.core,
  ...p.pages.core,
];
// Simulate each user's lifetime of page views, sessions, signups, and purchases.
// NOTE(review): the loop starts at i=1, so numUsers-1 users are generated — confirm intended
for (let i = 1; i < numUsers; i++) {
  // increase the clock by a bit (randomized around the average increment)
  currentTime =
    currentTime + incrementAvg + (Math.random() - 0.5) * incrementAvg;
  // save the time into a user space, which will be incremented more aggressively
  let userTime = currentTime;
  const userId = startUserId + i;
  let registered = false;
  let purchased = false;
  // Per-user copies of the global chances; experiment membership may scale these below
  let userSignupChance = signupChance;
  let userRegisterChance = signupConversionChance;
  let userPurchaseChance = purchaseChance;
  let userPurchaseAmount = purchaseAmount;
  let userNewPageChance = newPageChance;
  let sessionInfo = {
    session_id: sessionId,
    user_id: userId,
    date_start: format_time(userTime),
    date_end: "",
    duration_seconds: 0,
    num_pages: 1,
  };
  // add to user table
  users.push({
    user_id: userId,
    received_at: format_time(userTime),
    gender: genders[Math.floor(Math.random() * genders.length)],
  });
  // add a page view
  pages.push({
    user_id: userId,
    received_at: format_time(userTime),
    path: allPages[Math.floor(Math.random() * allPages.length)],
  });
  // do we put them in an experiment?
  p.experiments.forEach((exp) => {
    if (Math.random() >= experimentChance) return;
    const varIndex = Math.round(Math.random()); // 0 or 1
    experiment_viewed.push({
      user_id: userId,
      received_at: format_time(userTime),
      experiment_id: exp.id,
      variation_id: varIndex,
    });
    // does this experiment affect the outcome of anything?
    // (only the variation changes behavior, never the control)
    if (varIndex > 0) {
      if (exp.effects === "purchase") {
        userPurchaseChance *= exp.effectWeight;
      } else if (exp.effects === "purchaseAmount") {
        userPurchaseChance *= exp.effectWeight;
        userPurchaseAmount = exp.amount;
      } else if (exp.effects === "registration") {
        userRegisterChance *= exp.effectWeight;
      } else if (exp.effects === "signup") {
        userSignupChance *= exp.effectWeight;
      } else if (exp.effects === "pagesPerVisit") {
        userNewPageChance *= exp.effectWeight;
      }
    }
  });
  userTime = increment_time(userTime);
  // Keep browsing while the per-user "new page" chance keeps hitting
  while (Math.random() < userNewPageChance) {
    if (Math.random() < newSessionChance) {
      // lets end the previous session and start a new one:
      // (the comma-operator expression below is just two assignments)
      (sessionInfo.date_end = format_time(userTime)),
        // convert to seconds
        (sessionInfo.duration_seconds = Math.round(
          (Date.parse(sessionInfo.date_end) -
            Date.parse(sessionInfo.date_start)) /
            1000
        ));
      sessions.push({ ...sessionInfo });
      sessionId++;
      // long time jump between sessions
      userTime = increment_time(userTime, true);
      sessionInfo = {
        session_id: sessionId,
        user_id: userId,
        date_start: format_time(userTime),
        date_end: "",
        duration_seconds: 0,
        num_pages: 1,
      };
    }
    // add page:
    pages.push({
      user_id: userId,
      received_at: format_time(userTime),
      path: allPages[Math.floor(Math.random() * allPages.length)],
    });
    // add to session visit
    sessionInfo.num_pages++;
    // registration:
    if (!registered && Math.random() < userSignupChance) {
      // signup viewed:
      viewed_signup.push({
        user_id: userId,
        received_at: format_time(userTime),
      });
      // add the register flow pages:
      userTime = increment_time(userTime);
      pages.push({
        user_id: userId,
        received_at: format_time(userTime),
        path: p.pages.register[0],
      });
      sessionInfo.num_pages++;
      // did they actually sign up?
      if (Math.random() < userRegisterChance) {
        registered = true;
        userTime = increment_time(userTime);
        pages.push({
          user_id: userId,
          received_at: format_time(userTime),
          path: p.pages.register[1],
        });
        sessionInfo.num_pages++;
        signup.push({
          user_id: userId,
          received_at: format_time(userTime),
        });
      }
    }
    // purchase (at most once per user; may require registration first):
    if (
      ((purchaseRequiresReg && registered) || !purchaseRequiresReg) &&
      !purchased &&
      Math.random() < userPurchaseChance
    ) {
      purchased = true;
      // add the purchase flow pages:
      for (let n = 0; n < p.pages.purchase.length; n++) {
        userTime = increment_time(userTime);
        pages.push({
          user_id: userId,
          received_at: format_time(userTime),
          path: p.pages.purchase[n],
        });
        sessionInfo.num_pages++;
      }
      // add purchase to purchase stats
      purchase.push({
        user_id: userId,
        received_at: format_time(userTime),
        amount: Math.round(
          Math.random() * (userPurchaseAmount[1] - userPurchaseAmount[0]) +
            userPurchaseAmount[0]
        ),
      });
    }
    userTime = increment_time(userTime);
  }
  // save the final session info:
  sessionInfo.date_end = format_time(userTime);
  sessionInfo.duration_seconds = Math.round(
    (Date.parse(sessionInfo.date_end) - Date.parse(sessionInfo.date_start)) /
      1000
  );
  sessions.push({ ...sessionInfo });
  sessionId++;
}
// helper functions:
// Advance an epoch-ms timestamp by a random interval; `large` uses the
// between-sessions window, otherwise the small between-events window.
function increment_time(t, large) {
  const maxSeconds = large ? largeTimeEvent : smallTimeEvent;
  return t + Math.round(Math.random() * maxSeconds * 1000);
}
// Format an epoch-ms timestamp as "YYYY-MM-DD HH:MM:SS" (UTC).
// Uses slice() instead of the deprecated String.prototype.substr().
function format_time(t) {
  return new Date(t).toISOString().slice(0, 19).replace("T", " ");
}
// Comparator: sort records ascending by their received_at timestamp
function dateSort(a, b) {
  const left = Date.parse(a.received_at);
  const right = Date.parse(b.received_at);
  if (left > right) return 1;
  if (left < right) return -1;
  return 0;
}
// Tally experiment exposures per variation for a quick sanity check
const experiments = {};
experiment_viewed.forEach((e) => {
  experiments[e.experiment_id] = experiments[e.experiment_id] || [0, 0];
  experiments[e.experiment_id][e.variation_id]++;
});
console.log(experiments);
// Sort by date and write to file:
new ObjectToCsv(users.sort(dateSort)).toDisk(outputDirectory + "users.csv");
console.log("saved ", outputDirectory + "users.csv", users.length);
new ObjectToCsv(pages.sort(dateSort)).toDisk(outputDirectory + "pages.csv");
console.log("saved ", outputDirectory + "pages.csv", pages.length);
new ObjectToCsv(sessions.sort(dateSort)).toDisk(
  outputDirectory + "sessions.csv"
);
console.log("saved ", outputDirectory + "sessions.csv", sessions.length);
new ObjectToCsv(experiment_viewed.sort(dateSort)).toDisk(
  outputDirectory + "experiment_viewed.csv"
);
console.log(
  "saved ",
  outputDirectory + "experiment_viewed.csv",
  experiment_viewed.length
);
new ObjectToCsv(viewed_signup.sort(dateSort)).toDisk(
  outputDirectory + "viewed_signup.csv"
);
console.log(
  "saved ",
  outputDirectory + "viewed_signup.csv",
  viewed_signup.length
);
new ObjectToCsv(signup.sort(dateSort)).toDisk(outputDirectory + "signup.csv");
console.log("saved ", outputDirectory + "signup.csv", signup.length);
new ObjectToCsv(purchase.sort(dateSort)).toDisk(
  outputDirectory + "purchase.csv"
);
console.log("saved ", outputDirectory + "purchase.csv", purchase.length);
// Metadata consumed by the mongo seeding step
fs.writeFileSync(
  outputDirectory + "mongo.json",
  JSON.stringify({
    startDate,
    endDate,
    experiments: Object.keys(experiments),
  })
);

View File

@@ -0,0 +1,282 @@
{
"experiments": [
{
"id": "purchase_cta",
"effects": "purchase",
"effectWeight": 1.1
},
{
"id": "green_buttons",
"effects": "purchase",
"effectWeight": 1
},
{
"id": "simple_registration",
"effects": "signup",
"effectWeight": 0.8
},
{
"id": "responsive_nav",
"effects": "registration",
"effectWeight": 1.3
},
{
"id": "expanded_product_info",
"effects": "purchaseAmount",
"effectWeight": 0.9,
"amount": [30, 130]
},
{
"id": "longer_articles",
"effects": "pagesPerVisit",
"effectWeight": 1.1
}
],
"pages": {
"core": [
"/",
"/features",
"/product/marketing",
"/product/engineering",
"/product/pms",
"/stats",
"/why-us",
"/compared",
"/about",
"/privacy",
"/company"
],
"register": ["/register", "/welcome"],
"purchase": ["/pricing", "/checkout", "/confirmation"],
"blog": [
"/blog/drip-marketing",
"/blog/copper-introduces-redesigned-projects",
"/blog/keep-up-with-threads-in-copper-with-improved-gmail-collaboration",
"/blog/what-copper-users-need-to-know-about-g-suite-in-2020",
"/blog/11-quick-ways-to-optimize-your-sales-performance",
"/blog/how-to-help-your-team-thrive-in-the-age-of-remote-work",
"/blog/customer-onboarding",
"/blog/sales-presentation",
"/blog/customer-service-oriented",
"/blog/build-business-relationships",
"/blog/client-intake-form",
"/blog/how-to-ask-for-testimonials",
"/blog/client-interactions",
"/blog/sales-objections",
"/blog/hooray-the-new-and-improved-g-suite-add-on-is-here",
"/blog/good-customer-service",
"/blog/sales-process",
"/blog/customer-empathy",
"/blog/how-to-keep-customers-coming-back",
"/blog/happy-customer",
"/blog/how-to-get-past-the-gatekeeper",
"/blog/pipeline-stages",
"/blog/sales-role-play",
"/blog/how-to-create-a-knowledge-base",
"/blog/small-business-saturday",
"/blog/how-to-stay-organized-at-work",
"/blog/how-to-sell-over-the-phone",
"/blog/lead-management-process",
"/blog/manage-sales-leads",
"/blog/how-to-manage-leads-using-g-suite-apps",
"/blog/customer-service-skills",
"/blog/copper-crm-vs-hubspot",
"/blog/tools-to-manage-leads",
"/blog/automate-sales-tasks",
"/blog/customer-service-email-templates",
"/blog/branding-pre-mortem",
"/blog/how-to-create-a-small-business-marketing-plan",
"/blog/new-features-you-been-waiting-for",
"/blog/how-to-create-a-hyperlink-in-gmail",
"/blog/how-to-deal-with-difficult-customers",
"/blog/sales-productivity",
"/blog/how-to-recall-an-email-in-gmail",
"/blog/how-to-set-an-appointment",
"/blog/branding-lessons-for-small-businesses",
"/blog/how-to-resend-an-email-in-gmail",
"/blog/how-to-add-signature-in-gmail-with-logo",
"/blog/how-to-mail-merge-in-gmail",
"/blog/how-to-send-html-email-in-gmail",
"/blog/how-to-get-clients",
"/blog/customer-feedback",
"/blog/how-to-organize-business-cards",
"/blog/how-to-send-emails-to-undisclosed-recipients-in-gmail",
"/blog/customer-needs",
"/blog/sales-trends",
"/blog/email-marketing-best-practices",
"/blog/fundraising-crm-adrenaline",
"/blog/copper-appealie-saas-award",
"/blog/how-to-whitelist-email-in-gmail",
"/blog/sales-bonus",
"/blog/new-feature-inspired-by-you",
"/blog/how-to-send-bulk-email-without-spamming",
"/blog/how-to-build-an-email-list",
"/blog/instagram-for-small-business",
"/blog/newsletter-subject-lines",
"/blog/shared-inboxes",
"/blog/crisis-communication-plan",
"/blog/how-to-start-a-consulting-business-in-12-steps",
"/blog/how-sail-internet-tripled-revenue-in-1-year-with-help-from-copper",
"/blog/how-to-create-a-newsletter",
"/blog/square-vs-stripe",
"/blog/motivational-sales-quotes",
"/blog/sign-up-for-our-newsletter",
"/blog/sales-email-statistics",
"/blog/value-based-selling",
"/blog/cold-calling-script",
"/blog/canned-response-gmail",
"/blog/sales-quotas",
"/blog/how-to-build-rapport-with-clients",
"/blog/sales-activities",
"/blog/inbox-zero",
"/blog/copper-in-canada",
"/blog/open-house-follow-up-email",
"/blog/marketing-strategies-for-consultants",
"/blog/sales-pitch-examples",
"/blog/how-to-get-consulting-clients",
"/blog/sales-follow-up-emails",
"/blog/linkedin-for-small-business",
"/blog/trade-show-follow-up-emails",
"/blog/sales-targets",
"/blog/sales-territory-plan",
"/blog/welcome-email",
"/blog/facebook-for-small-business",
"/blog/sales-prospecting-plan",
"/blog/how-to-take-meeting-notes",
"/blog/losing-customers",
"/blog/consulting-rates",
"/blog/sales-meeting",
"/blog/small-business-leads",
"/blog/sales-battlecards",
"/blog/how-to-ask-for-the-sale",
"/blog/copper-crm-constellation-q3-2019",
"/blog/copper-crm-top-startup",
"/blog/how-to-thank-client-for-their-business",
"/blog/how-to-make-an-email-list-in-gmail",
"/blog/sales-goals",
"/blog/gmail-shortcuts",
"/blog/sales-objectives",
"/blog/how-to-close-a-sale",
"/blog/you-asked-we-listened",
"/blog/types-of-sales-pitches",
"/blog/copper-crm-inc-5000",
"/blog/linkedin-sales-prospecting",
"/blog/create-calendar-google-docs",
"/blog/email-sales-pitch",
"/blog/small-business-loyalty-programs",
"/blog/how-small-businesses-can-master-twitter",
"/blog/customer-winback",
"/blog/copper-best-smb-crm-martech-awards",
"/blog/sajari-crm-case-study",
"/blog/sales-team",
"/blog/google-hangouts-hacks",
"/blog/small-business-marketing",
"/blog/b2b-customer-journey-mapping",
"/blog/saas-customer-retention",
"/blog/creative-agency-relationships-crm",
"/blog/customer-loyalty",
"/blog/gmail-hacks",
"/blog/how-to-schedule-a-meeting",
"/blog/gartner-magic-quadrant",
"/blog/customer-appreciation-ideas",
"/blog/how-to-respond-to-google-reviews",
"/blog/how-to-write-a-case-study",
"/blog/client-relationships",
"/blog/google-calendar-planning",
"/blog/how-to-deal-with-angry-customers",
"/blog/google-docs-hacks",
"/blog/touch-base-email",
"/blog/how-to-respond-to-negative-yelp-reviews",
"/blog/google-drive-hacks",
"/blog/sales-funnel-management",
"/blog/customer-reviews",
"/blog/leads-vs-opportunities",
"/blog/google-calendar-hacks",
"/blog/sales-pitch",
"/blog/conference-networking",
"/blog/selling-styles",
"/blog/customer-retention",
"/blog/apology-letter-to-customers",
"/blog/customer-journey-mapping",
"/blog/jump-450-agency-crm-case-study",
"/blog/customer-marketing",
"/blog/sales-notes",
"/blog/sales-email-subject-lines",
"/blog/unique-selling-proposition",
"/blog/sales-experts",
"/blog/sales-forecasting",
"/blog/google-next",
"/blog/sales-deck",
"/blog/transactional-selling",
"/blog/sales-enablement",
"/blog/sales-account-planning",
"/blog/sales-rep-kpis",
"/blog/sales-campaign",
"/blog/copper-docusign-integration",
"/blog/follow-up-sales-call-script",
"/blog/saas-sales-metrics",
"/blog/outbound-sales-call-script",
"/blog/sales-manager-kpi",
"/blog/how-to-succeed-in-sales",
"/blog/sales-executive-kpi",
"/blog/types-of-sales-jobs",
"/blog/how-to-improve-sales-skills",
"/blog/value-added-selling",
"/blog/real-estate-websites",
"/blog/cold-calling-scripts-for-appointments",
"/blog/sales-operations",
"/blog/how-to-improve-sales-performance",
"/blog/business-development-vs-sales",
"/blog/sales-dashboard",
"/blog/cold-call",
"/blog/sales-script-vs-no-sales-script",
"/blog/insurance-cold-calling-scripts",
"/blog/best-time-to-cold-call",
"/blog/close-sales-calls",
"/blog/real-estate-cold-calling",
"/blog/how-to-boost-sales",
"/blog/saas-sales",
"/blog/cold-calling-techniques",
"/blog/cold-calling-success-rates",
"/blog/enterprise-sales",
"/blog/does-cold-calling-work",
"/blog/mrr-monthly-recurring-revenue",
"/blog/faint-sales-framework",
"/blog/overcoming-price-objections",
"/blog/lead-conversion",
"/blog/customer-lifetime-value-formula",
"/blog/sales-cycle",
"/blog/generate-sales-leads",
"/blog/wild-earth-case-study",
"/blog/sales-skills",
"/blog/sales-emails-that-get-responses",
"/blog/sales-manager-interview-questions",
"/blog/sales-productivity-metrics",
"/blog/sales-rejection",
"/blog/best-sales-apps",
"/blog/business-card-alternatives",
"/blog/anum-framework",
"/blog/gifts-for-sales",
"/blog/meddic-framework",
"/blog/customer-success-crm",
"/blog/probing-questions",
"/blog/upselling",
"/blog/crm-user-experience",
"/blog/instagram-social-selling",
"/blog/sales-resume-skills",
"/blog/sales-interview-questions",
"/blog/account-executive",
"/blog/body-language-sales",
"/blog/champ-framework",
"/blog/sales-velocity",
"/blog/best-bluetooth-headsets",
"/blog/best-movie-quotes-sales",
"/blog/google-next-2019-recap",
"/blog/crm-onboarding",
"/blog/linkedin-summary-examples",
"/blog/sales-management-fast-growth",
"/blog/email-deliverability",
"/blog/online-sales-funnel"
]
}
}

Some files were not shown because too many files have changed in this diff Show More