Compare commits

...

73 Commits

Author SHA1 Message Date
timvisee
72377d3438 Bump version to 3.4.10 2021-05-07 13:09:37 +02:00
timvisee
512c9803bd Enable base URL detection by default with npm start, remove FXA_CLIENT_ID 2021-05-07 13:07:26 +02:00
timvisee
4c45d6217d Properly derive base URL as configured in file upload logic
Fixes https://github.com/timvisee/send/issues/29
2021-05-07 13:07:17 +02:00
timvisee
b4b8060a78 Update dependencies 2021-05-07 12:40:16 +02:00
timvisee
ed042b8515 Merge branch 'ckwalsh-detect_base_url' into master 2021-05-07 12:38:24 +02:00
timvisee
06bc58c93c Merge branch 'detect_base_url' of https://github.com/ckwalsh/send into ckwalsh-detect_base_url 2021-05-07 12:30:06 +02:00
timvisee
b58caed44f Merge branch 'dependabot/npm_and_yarn/url-parse-1.5.1' into master 2021-05-06 18:45:36 +02:00
timvisee
174ade1c2e Merge branch 'master' into dependabot/npm_and_yarn/url-parse-1.5.1 2021-05-06 18:44:28 +02:00
timvisee
31ce8c048b Merge branch 'dependabot/npm_and_yarn/lodash-4.17.21' into master 2021-05-06 18:38:29 +02:00
dependabot[bot]
ce401881d7 Bump url-parse from 1.4.7 to 1.5.1
Bumps [url-parse](https://github.com/unshiftio/url-parse) from 1.4.7 to 1.5.1.
- [Release notes](https://github.com/unshiftio/url-parse/releases)
- [Commits](https://github.com/unshiftio/url-parse/compare/1.4.7...1.5.1)

Signed-off-by: dependabot[bot] <support@github.com>
2021-05-06 16:35:49 +00:00
dependabot[bot]
c49e8e1062 Bump lodash from 4.17.20 to 4.17.21
Bumps [lodash](https://github.com/lodash/lodash) from 4.17.20 to 4.17.21.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.20...4.17.21)

Signed-off-by: dependabot[bot] <support@github.com>
2021-05-06 16:35:08 +00:00
timvisee
15648157c9 Update dependencies 2021-05-06 18:31:34 +02:00
timvisee
4280edd5af Merge branch 'tjeerdhans-patch-1' into master
See https://github.com/timvisee/send/pull/26
2021-05-06 18:25:58 +02:00
Tjeerd Hans
a3d4e2c502 Some dutch grammar fixes 2021-05-06 17:24:31 +02:00
timvisee
bed5443685 Merge branch 'abhijitnathwani-patch-1' into master
See https://github.com/timvisee/send/pull/25
2021-05-06 11:31:27 +02:00
timvisee
f9f5d77cd0 Merge branch 'abhijitnathwani-patch-1' into master
See https://github.com/timvisee/send/pull/25
2021-05-06 11:30:59 +02:00
Abhijit Nathwani
0f8a6a107a Update git url in deployment.md 2021-05-06 12:21:55 +05:30
Cullen Walsh
02e8cb264f Add detect_base_url config
This diff adds the detect_base_url config, controlled by the
DETECT_BASE_URL env variable. When set to true, the BASE_URL setting is
ignored, and the base_url is derived from the request protocol and host
header.

Test Plan: Started up a local instance in my homelab, running docker
node:15 image with a nginx reverse proxy. Configured nginx to use the
same backend with multiple hostnames on https. Opened in browser and
confirmed og:url meta tag uses correct url.
2021-05-05 22:19:11 -07:00
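For context, a condensed sketch of the base URL detection described in this commit (not the exact committed code; the real helper, deriveBaseUrl, appears in the server config diff further down, and DETECT_BASE_URL / BASE_URL are the environment variables involved):

// Sketch: with detect_base_url enabled, the base URL is taken from the request
// (protocol + Host header) instead of the static base_url setting.
function deriveBaseUrl(req, config) {
  if (!config.detect_base_url) {
    return config.base_url; // fall back to the configured BASE_URL
  }
  const protocol = req.secure ? 'https://' : 'http://';
  return `${protocol}${req.headers.host}`; // e.g. https://send.example.org
}

Routes can then build absolute links such as `${deriveBaseUrl(req, config)}/download/${id}/`, so download URLs and the og:url meta tag match whichever hostname the request arrived on.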
timvisee
385ac595b9 Fix linguist documentation marker for locale files
Thanks https://news.ycombinator.com/item?id=27055526
2021-05-05 22:46:55 +02:00
timvisee
6df0876286 Merge branch 'whalehub-patch-1' into master 2021-05-03 00:16:41 +02:00
Aaron
827a35f73e main.css: Use ::marker to avoid browser console warning
Signed-off-by: Aaron <admin@datahoarder.dev>
2021-05-03 00:13:24 +02:00
timvisee
eb3a9e8c89 Bump version to 3.4.9 2021-04-21 21:52:18 +02:00
timvisee
6c3ac403f6 Update dependencies 2021-04-21 21:51:12 +02:00
timvisee
1ce2a60dd5 Merge branch 'whalehub-patch-1' into master
https://github.com/timvisee/send/pull/19
2021-04-21 21:49:21 +02:00
Aaron
f5bb74e921 index.js: Add "data:" as an allowed image source in CSP
Signed-off-by: Aaron <admin@datahoarder.dev>
2021-04-21 21:40:15 +02:00
timvisee
352fba6302 Update dependencies 2021-04-20 20:37:16 +02:00
timvisee
ace2aa5d73 Merge branch 'dependabot/npm_and_yarn/ssri-6.0.2' into master
See https://github.com/timvisee/send/pull/18
2021-04-20 20:36:17 +02:00
timvisee
3256b01276 Merge branch 'master' into dependabot/npm_and_yarn/ssri-6.0.2
See https://github.com/timvisee/send/pull/18
2021-04-20 20:35:35 +02:00
timvisee
96244132c6 Bump version to 3.4.8 2021-04-20 18:52:45 +02:00
timvisee
a9cdd13543 Update dependencies 2021-04-20 18:50:12 +02:00
timvisee
1b6c5b8f97 Only set Redis client password if password is specified
This attempts to fix a Redis connection issue when the Redis password
is an empty string.

See https://github.com/timvisee/send-docker-compose/issues/3#issuecomment-822885578
2021-04-20 18:37:19 +02:00
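A minimal sketch of this change (the full diff is in the Redis storage section at the bottom of this compare), assuming the node_redis v2 createClient(options) API used by this project:

const redis = require('redis');
const config = require('../config'); // the app's config: redis_host, redis_port, redis_password

const clientConfig = {
  host: config.redis_host,
  port: config.redis_port
};
// Only pass a password when one is actually configured; the reported issue was a
// connection failure when REDIS_PASSWORD was left as an empty string.
if (config.redis_password != null && config.redis_password.length > 0) {
  clientConfig.password = config.redis_password;
}
const client = redis.createClient(clientConfig);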
Tim Visée
27e6606516 Merge branch 'simao-silva-master-patch-09841' into 'master'
Update Alpine images to current tag

See merge request timvisee/send!15
2021-04-19 19:37:59 +00:00
Simão Silva
4902d304b6 Update Alpine images to current tag 2021-04-19 19:32:48 +00:00
timvisee
a182ff2dd1 Bump version to 3.4.7 2021-04-18 11:38:05 +02:00
timvisee
0361e3ce1c Update dependencies 2021-04-18 11:35:16 +02:00
dependabot[bot]
32539e58ac Bump ssri from 6.0.1 to 6.0.2
Bumps [ssri](https://github.com/npm/ssri) from 6.0.1 to 6.0.2.
- [Release notes](https://github.com/npm/ssri/releases)
- [Changelog](https://github.com/npm/ssri/blob/v6.0.2/CHANGELOG.md)
- [Commits](https://github.com/npm/ssri/compare/v6.0.1...v6.0.2)

Signed-off-by: dependabot[bot] <support@github.com>
2021-04-18 09:33:53 +00:00
Tim Visée
eeb1359d90 Merge branch 'moreopts' into 'master'
add env for redis pwd and port

See merge request timvisee/send!14
2021-04-18 09:32:39 +00:00
piaoger gong
e2dde364eb add env for redis pwd and port 2021-04-18 11:08:35 +08:00
timvisee
0c1ce9f598 Bump version to 3.4.6 2021-04-12 15:21:08 +02:00
timvisee
15d37da667 Remove obsolete anonymous limits
Related to https://gitlab.com/timvisee/send/-/issues/3
2021-04-12 15:19:02 +02:00
timvisee
9e4c063749 Update dependencies 2021-04-12 11:33:22 +02:00
timvisee
398d044ca2 Update dependencies 2021-04-09 12:52:30 +02:00
Tim Visée
d576003dd1 Merge branch 'thomassth-master-patch-74555' into 'master'
remove mozilla affiliation

See merge request timvisee/send!13
2021-04-02 09:20:08 +00:00
Thomas
fc0e8708b9 Merge branch 'thomassth-master-patch-60115' into 'thomassth-master-patch-74555'
Update localization.md

See merge request thomassth/send!1
2021-04-02 07:03:19 +00:00
Thomas
d05eb3e882 Update localization.md 2021-04-02 06:58:32 +00:00
Thomas
db2a55115a Update README.md 2021-04-02 06:55:18 +00:00
timvisee
c6316f2dad Merge branch 'dependabot/npm_and_yarn/y18n-4.0.1' into 'master'
Fixes https://github.com/timvisee/send/pull/13
2021-04-01 13:17:40 +02:00
dependabot[bot]
3d6611455a Bump y18n from 4.0.0 to 4.0.1
Bumps [y18n](https://github.com/yargs/y18n) from 4.0.0 to 4.0.1.
- [Release notes](https://github.com/yargs/y18n/releases)
- [Changelog](https://github.com/yargs/y18n/blob/master/CHANGELOG.md)
- [Commits](https://github.com/yargs/y18n/commits)

Signed-off-by: dependabot[bot] <support@github.com>
2021-04-01 13:17:25 +02:00
timvisee
5b37d2fc87 Update dependencies 2021-04-01 13:10:37 +02:00
timvisee
2fa214ea6f Add link to public instances list 2021-03-26 12:23:27 +01:00
timvisee
6dafdcdebd Bump version to 3.4.5 2021-03-15 21:17:44 +01:00
timvisee
dc03b42b96 Update dependencies 2021-03-15 21:16:57 +01:00
Tim Visée
3e07f648b3 Merge branch 'remove-fxa-dialog-on-big-file' into 'master'
Remove FxA dialog on file too big error

See merge request timvisee/send!12
2021-03-15 20:15:27 +00:00
Romain Hv
f58597cece Remove FxA dialog on file too big error 2021-03-15 20:58:47 +01:00
Tim Visée
d3f9b82672 Merge branch 'remove-metrics' into 'master'
Remove metrics

Closes #4

See merge request timvisee/send!11
2021-03-15 19:49:26 +00:00
Romain Hv
a0bc20aeb6 Remove metrics #4 2021-03-15 19:56:51 +01:00
timvisee
d03e83dd66 Merge branch 'dependabot/npm_and_yarn/elliptic-6.5.4'
Fixes https://github.com/timvisee/send/pull/8
2021-03-11 16:22:08 +01:00
dependabot[bot]
94e80ccee9 Bump elliptic from 6.5.3 to 6.5.4
Bumps [elliptic](https://github.com/indutny/elliptic) from 6.5.3 to 6.5.4.
- [Release notes](https://github.com/indutny/elliptic/releases)
- [Commits](https://github.com/indutny/elliptic/compare/v6.5.3...v6.5.4)

Signed-off-by: dependabot[bot] <support@github.com>
2021-03-11 16:21:38 +01:00
Tim Visée
f8358c4dac Merge branch 'help-review' into 'master'
Improvement of the reverse proxy guidelines, the given configuration example...

See merge request timvisee/send!10
2021-03-07 13:27:11 +00:00
Florian HEGRON
ec3cff63a2 Improvement of the reverse proxy guidelines, the given configuration example requires to have the apache rewrite mod enabled. 2021-03-07 14:24:47 +01:00
Tim Visée
8f192482b5 Merge branch 'help-review' into 'master'
Delete dead links about testing environments in the README.md

See merge request timvisee/send!9
2021-03-07 09:07:17 +00:00
Florian HEGRON
808a04b669 Delete dead links about testing environments in the README.md 2021-03-07 09:03:20 +00:00
timvisee
71a925a674 Bump version to 3.4.4 2021-02-16 15:18:03 +01:00
timvisee
64d9cd694d Update dependencies 2021-02-16 15:15:28 +01:00
timvisee
94b78b425f Use node 15 Docker image on GitLab CI 2021-02-16 15:12:23 +01:00
timvisee
42e94139a2 Merge branch 'atomheartother:master' into 'master'
See https://github.com/timvisee/send/pull/6
2021-02-16 15:10:22 +01:00
E. Navennec
6bd6280fb5 Use up to date alpine images and not slim images 2021-02-16 14:56:39 +01:00
timvisee
1f2c524b40 Include redis-mock in main build to allow usage without Redis 2021-02-16 14:56:00 +01:00
timvisee
854810c242 Use short commit hashes in Docker image tags 2021-02-16 14:24:58 +01:00
timvisee
45024d3dc6 Rework GitLab CI configuration, always build Docker image artifact
This artifact will be used as master-branch and release image, without
building the image again
2021-02-16 14:12:49 +01:00
timvisee
0806b8fd9d Bump version to 3.4.3 2021-02-05 02:31:15 +01:00
timvisee
2dbc740998 Update dependencies 2021-02-05 02:29:19 +01:00
timvisee
5b9c8301c7 Fix incorrect environment variable for config property 2021-02-05 02:24:48 +01:00
32 changed files with 2056 additions and 2104 deletions

.gitattributes (vendored): 4 changes

@@ -1,2 +1,2 @@
public/locales/* linguist-documentation
docs/* linguist-documentation
public/locales/*/*.ftl linguist-documentation
docs/** linguist-documentation


@@ -1,4 +1,4 @@
image: "node:12-slim"
image: "node:15-slim"
stages:
- test
@@ -17,6 +17,7 @@ before_script:
- apt-get update
- apt-get install -y google-chrome-stable fonts-ipafont-gothic fonts-wqy-zenhei fonts-thai-tlwg fonts-kacst fonts-freefont-ttf libxss1 --no-install-recommends
# Build Send, run npm tests
test:
stage: test
script:
@@ -24,48 +25,81 @@ test:
- npm run lint
- npm test
# Release Docker image artifact for easy testing
# Build Docker image, export Docker image artifact
artifact-docker:
stage: artifact
image: docker:latest
needs: []
services:
- docker:dind
variables:
IMG_FILE: "send:git-$CI_COMMIT_SHORT_SHA.tar"
IMG_NAME: "send:git-$CI_COMMIT_SHORT_SHA"
before_script: []
script:
- docker build -t $IMG_NAME .
- docker image save -o $IMG_FILE $IMG_NAME
artifacts:
name: artifact-docker
paths:
- $IMG_FILE
expire_in: 1 week
# Release public Docker image for the master branch
release-docker-master:
stage: release
image: docker:latest
dependencies:
- artifact-docker
services:
- docker:dind
only:
- master
variables:
IMG_IMPORT_FILE: "send:git-$CI_COMMIT_SHORT_SHA.tar"
IMG_IMPORT_NAME: "send:git-$CI_COMMIT_SHORT_SHA"
IMG_NAME: "registry.gitlab.com/timvisee/send:master-$CI_COMMIT_SHORT_SHA"
before_script: []
script:
- export IMG_NAME=registry.gitlab.com/timvisee/send:master-$CI_COMMIT_SHA
# Login in to registry
- 'docker login registry.gitlab.com -u $DOCKER_USER -p $DOCKER_PASS'
# Build and push image, report image name
- docker build -t $IMG_NAME .
- docker push $IMG_NAME
- 'echo Docker image artifact published, available as:'
- 'echo " docker pull $IMG_NAME"'
# Load existing, retag for new image images
- docker image load -i $IMG_IMPORT_FILE
- docker tag $IMG_IMPORT_NAME $IMG_NAME
# Release public Docker image
# Publish tagged image
- docker push $IMG_NAME
- 'echo "Docker image artifact published, available as:" && echo " docker pull $IMG_NAME"'
# Release public Docker image for a version tag
release-docker:
stage: release
image: docker:latest
dependencies:
- artifact-docker
services:
- docker:dind
only:
- /^v(\d+\.)*\d+$/
variables:
IMG_IMPORT_FILE: "send:git-$CI_COMMIT_SHORT_SHA.tar"
IMG_IMPORT_NAME: "send:git-$CI_COMMIT_SHORT_SHA"
IMG_NAME: "registry.gitlab.com/timvisee/send:$CI_COMMIT_REF_NAME"
IMG_NAME_LATEST: "registry.gitlab.com/timvisee/send:latest"
before_script: []
script:
- export IMG_NAME=registry.gitlab.com/timvisee/send:$CI_COMMIT_REF_NAME
- export IMG_NAME_LATEST=registry.gitlab.com/timvisee/send:latest
# Login in to registry
- 'docker login registry.gitlab.com -u $DOCKER_USER -p $DOCKER_PASS'
# Build and push image, report image name
- docker build -t $IMG_NAME .
- docker tag $IMG_NAME $IMG_NAME_LATEST
# Load existing, retag for new image images
- docker image load -i $IMG_IMPORT_FILE
- docker tag $IMG_IMPORT_NAME $IMG_NAME
- docker tag $IMG_IMPORT_NAME $IMG_NAME_LATEST
# Publish tagged image
- docker push $IMG_NAME
- docker push $IMG_NAME_LATEST
- 'echo Docker image artifact published, available as:'
- 'echo " docker pull $IMG_NAME_LATEST"'
- 'echo " docker pull $IMG_NAME"'
- 'echo "Docker image artifact published, available as:" && echo " docker pull $IMG_NAME_LATEST" && echo " docker pull $IMG_NAME"'


@@ -6,13 +6,13 @@
# Build project
FROM node:12 AS builder
FROM node:current-alpine AS builder
RUN set -x \
# Add user
&& addgroup --gid 10001 app \
&& adduser --disabled-password \
--gecos '' \
--gid 10001 \
--ingroup app \
--home /app \
--uid 10001 \
app
@@ -26,19 +26,17 @@ RUN set -x \
# Main image
FROM node:12-slim
FROM node:current-alpine
RUN set -x \
# Add user
&& addgroup --gid 10001 app \
&& adduser --disabled-password \
--gecos '' \
--gid 10001 \
--ingroup app \
--home /app \
--uid 10001 \
app
RUN apt-get update && apt-get -y install \
git-core \
&& rm -rf /var/lib/apt/lists/*
USER app
WORKDIR /app
COPY --chown=app:app package*.json ./


@@ -53,7 +53,7 @@ Thanks [Mozilla][mozilla] for building this amazing tool!
---
**Docs:** [FAQ](docs/faq.md), [Encryption](docs/encryption.md), [Build](docs/build.md), [Docker](docs/docker.md), [Metrics](docs/metrics.md), [More](docs/)
**Docs:** [FAQ](docs/faq.md), [Encryption](docs/encryption.md), [Build](docs/build.md), [Docker](docs/docker.md), [More](docs/)
---
@@ -66,7 +66,7 @@ Thanks [Mozilla][mozilla] for building this amazing tool!
* [Configuration](#configuration)
* [Localization](#localization)
* [Contributing](#contributing)
* [Testing](#testing)
* [Instances](#instances)
* [Deployment](#deployment)
* [Clients](#clients)
* [License](#license)
@@ -121,31 +121,25 @@ The server is configured with environment variables. See [server/config.js](serv
## Localization
Send localization is managed via [Pontoon](https://pontoon.mozilla.org/projects/test-pilot-firefox-send/), not direct pull requests to the repository. If you want to fix a typo, add a new language, or simply know more about localization, please get in touch with the [existing localization team](https://pontoon.mozilla.org/teams/) for your language or Mozillas [l10n-drivers](https://wiki.mozilla.org/L10n:Mozilla_Team#Mozilla_Corporation) for guidance.
see also [docs/localization.md](docs/localization.md)
see [docs/localization.md](docs/localization.md)
---
## Contributing
Pull requests are always welcome! Feel free to check out the list of ["good first issues"](https://github.com/mozilla/send/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
Pull requests are always welcome! Feel free to check out the list of "good first issues" (to be implemented).
---
## Testing
## Instances
| ENVIRONMENT | URL
|-------------|-----
| Production | <https://send.firefox.com/>
| Stage | <https://stage.send.nonprod.cloudops.mozgcp.net/>
| Development | <https://send2.dev.lcip.org/>
Find a list of public instances here: https://github.com/timvisee/send-instances/
---
## Deployment
see also [docs/deployment.md](docs/deployment.md)
See also [docs/deployment.md](docs/deployment.md)
---


@@ -4,7 +4,6 @@ import html from 'choo/html';
import * as Sentry from '@sentry/browser';
import { setApiUrlPrefix, getConstants } from '../app/api';
import metrics from '../app/metrics';
//import assets from '../common/assets';
import Archive from '../app/archive';
import Header from '../app/ui/header';
@@ -83,7 +82,6 @@ function body(main) {
state.user = new User(storage, LIMITS);
state.sentry = Sentry;
});
app.use(metrics);
app.route('/', body(home));
app.route('/upload', upload);
app.route('/share/:id', share);


@@ -420,17 +420,6 @@ export async function setFileList(bearerToken, kid, data) {
return response.ok;
}
export function sendMetrics(blob) {
if (!navigator.sendBeacon) {
return;
}
try {
navigator.sendBeacon(getApiUrl('/api/metrics'), blob);
} catch (e) {
console.error(e);
}
}
export async function getConstants() {
const response = await fetch(getApiUrl('/config'));


@@ -1,4 +1,3 @@
import * as metrics from './metrics';
import FileReceiver from './fileReceiver';
import FileSender from './fileSender';
import copyDialog from './ui/copyDialog';
@@ -54,7 +53,6 @@ export default function(state, emitter) {
emitter.on('logout', async () => {
await state.user.logout();
metrics.loggedOut({ trigger: 'button' });
emitter.emit('pushState', '/');
});
@@ -68,14 +66,6 @@ export default function(state, emitter) {
emitter.on('delete', async ownedFile => {
try {
metrics.deletedUpload({
size: ownedFile.size,
time: ownedFile.time,
speed: ownedFile.speed,
type: ownedFile.type,
ttl: ownedFile.expiresAt - Date.now(),
location
});
state.storage.remove(ownedFile.id);
await ownedFile.del();
} catch (e) {
@@ -101,9 +91,6 @@ export default function(state, emitter) {
state.LIMITS.MAX_FILES_PER_ARCHIVE
);
} catch (e) {
if (e.message === 'fileTooBig' && maxSize < state.LIMITS.MAX_FILE_SIZE) {
return emitter.emit('signup-cta', 'size');
}
state.modal = okDialog(
state.translate(e.message, {
size: bytes(maxSize),
@@ -123,7 +110,7 @@ export default function(state, emitter) {
source: query.utm_source,
term: query.utm_term
});
state.modal = signupDialog(source);
state.modal = signupDialog();
render();
});
@@ -159,12 +146,9 @@ export default function(state, emitter) {
const links = openLinksInNewTab();
await delay(200);
const start = Date.now();
try {
const ownedFile = await sender.upload(archive, state.user.bearerToken);
state.storage.totalUploads += 1;
const duration = Date.now() - start;
metrics.completedUpload(archive, duration);
faviconProgressbar.updateFavicon(0);
state.storage.addFile(ownedFile);
@@ -181,7 +165,6 @@ export default function(state, emitter) {
} catch (err) {
if (err.message === '0') {
//cancelled. do nothing
metrics.cancelledUpload(archive, err.duration);
render();
} else if (err.message === '401') {
const refreshed = await state.user.refresh();
@@ -197,7 +180,6 @@ export default function(state, emitter) {
scope.setExtra('size', err.size);
state.sentry.captureException(err);
});
metrics.stoppedUpload(archive, err.duration);
emitter.emit('pushState', '/error');
}
} finally {
@@ -249,13 +231,11 @@ export default function(state, emitter) {
render();
});
emitter.on('download', async file => {
emitter.on('download', async () => {
state.transfer.on('progress', updateProgress);
state.transfer.on('decrypting', render);
state.transfer.on('complete', render);
const links = openLinksInNewTab();
const size = file.size;
const start = Date.now();
try {
const dl = state.transfer.download({
stream: state.capabilities.streamDownload
@@ -263,12 +243,6 @@ export default function(state, emitter) {
render();
await dl;
state.storage.totalDownloads += 1;
const duration = Date.now() - start;
metrics.completedDownload({
size,
duration,
password_protected: file.requiresPassword
});
faviconProgressbar.updateFavicon(0);
} catch (err) {
if (err.message === '0') {
@@ -286,12 +260,6 @@ export default function(state, emitter) {
scope.setExtra('progress', err.progress);
state.sentry.captureException(err);
});
const duration = Date.now() - start;
metrics.stoppedDownload({
size,
duration,
password_protected: file.requiresPassword
});
}
emitter.emit('pushState', location);
}
@@ -302,7 +270,6 @@ export default function(state, emitter) {
emitter.on('copy', ({ url }) => {
copyToClipboard(url);
// metrics.copiedLink({ location });
});
emitter.on('closeModal', () => {


@@ -118,7 +118,7 @@ details {
overflow: hidden;
}
details > summary::-webkit-details-marker {
details > summary::marker {
display: none;
}


@@ -10,7 +10,6 @@ import controller from './controller';
import dragManager from './dragManager';
import pasteManager from './pasteManager';
import storage from './storage';
import metrics from './metrics';
import experiments from './experiments';
import * as Sentry from '@sentry/browser';
import './main.css';
@@ -68,7 +67,6 @@ if (process.env.NODE_ENV === 'production') {
// eslint-disable-next-line require-atomic-updates
window.app = app;
app.use(experiments);
app.use(metrics);
app.use(controller);
app.use(dragManager);
app.use(pasteManager);


@@ -1,186 +0,0 @@
import storage from './storage';
import { platform, locale } from './utils';
import { sendMetrics } from './api';
let appState = null;
let experiment = null;
const HOUR = 1000 * 60 * 60;
const events = [];
let session_id = Date.now();
const lang = locale();
export default function initialize(state, emitter) {
appState = state;
emitter.on('DOMContentLoaded', () => {
experiment = storage.enrolled;
if (!appState.user.firstAction) {
appState.user.firstAction =
appState.route === '/' ? 'upload' : 'download';
}
const query = appState.query;
addEvent('client_visit', {
entrypoint: appState.route === '/' ? 'upload' : 'download',
referrer: document.referrer,
utm_campaign: query.utm_campaign,
utm_content: query.utm_content,
utm_medium: query.utm_medium,
utm_source: query.utm_source,
utm_term: query.utm_term
});
});
emitter.on('experiment', experimentEvent);
window.addEventListener('unload', submitEvents);
}
function sizeOrder(n) {
return Math.floor(Math.log10(n));
}
function submitEvents() {
if (navigator.doNotTrack === '1') {
return;
}
sendMetrics(
new Blob(
[
JSON.stringify({
now: Date.now(),
session_id,
lang,
platform: platform(),
events
})
],
{ type: 'text/plain' } // see http://crbug.com/490015
)
);
events.splice(0);
}
async function addEvent(event_type, event_properties) {
const user_id = await appState.user.metricId();
const device_id = await appState.user.deviceId();
const ab_id = Object.keys(experiment)[0];
if (ab_id) {
event_properties.experiment = ab_id;
event_properties.variant = experiment[ab_id];
}
events.push({
device_id,
event_properties,
event_type,
time: Date.now(),
user_id,
user_properties: {
anonymous: !appState.user.loggedIn,
first_action: appState.user.firstAction,
active_count: storage.files.length
}
});
if (events.length === 25) {
submitEvents();
}
}
function cancelledUpload(archive, duration) {
return addEvent('client_upload', {
download_limit: archive.dlimit,
duration: sizeOrder(duration),
file_count: archive.numFiles,
password_protected: !!archive.password,
size: sizeOrder(archive.size),
status: 'cancel',
time_limit: archive.timeLimit
});
}
function completedUpload(archive, duration) {
return addEvent('client_upload', {
download_limit: archive.dlimit,
duration: sizeOrder(duration),
file_count: archive.numFiles,
password_protected: !!archive.password,
size: sizeOrder(archive.size),
status: 'ok',
time_limit: archive.timeLimit
});
}
function stoppedUpload(archive, duration = 0) {
return addEvent('client_upload', {
download_limit: archive.dlimit,
duration: sizeOrder(duration),
file_count: archive.numFiles,
password_protected: !!archive.password,
size: sizeOrder(archive.size),
status: 'error',
time_limit: archive.timeLimit
});
}
function stoppedDownload(params) {
return addEvent('client_download', {
duration: sizeOrder(params.duration),
password_protected: params.password_protected,
size: sizeOrder(params.size),
status: 'error'
});
}
function completedDownload(params) {
return addEvent('client_download', {
duration: sizeOrder(params.duration),
password_protected: params.password_protected,
size: sizeOrder(params.size),
status: 'ok'
});
}
function deletedUpload(ownedFile) {
return addEvent('client_delete', {
age: Math.floor((Date.now() - ownedFile.createdAt) / HOUR),
downloaded: ownedFile.dtotal > 0,
status: 'ok'
});
}
function experimentEvent(params) {
return addEvent('client_experiment', params);
}
function submittedSignup(params) {
return addEvent('client_login', {
status: 'ok',
trigger: params.trigger
});
}
function canceledSignup(params) {
return addEvent('client_login', {
status: 'cancel',
trigger: params.trigger
});
}
function loggedOut(params) {
addEvent('client_logout', {
status: 'ok',
trigger: params.trigger
});
// flush events and start new anon session
submitEvents();
session_id = Date.now();
}
export {
cancelledUpload,
stoppedUpload,
completedUpload,
deletedUpload,
stoppedDownload,
completedDownload,
submittedSignup,
canceledSignup,
loggedOut
};


@@ -580,7 +580,7 @@ module.exports.preview = function(state, emit) {
function download(event) {
event.preventDefault();
event.target.disabled = true;
emit('download', archive);
emit('download');
}
};


@@ -100,7 +100,7 @@ module.exports = function(state, emit) {
);
break;
case 'download':
emit('download', archive);
emit('download');
break;
}
return false;


@@ -1,9 +1,8 @@
const html = require('choo/html');
const assets = require('../../common/assets');
const { bytes } = require('../utils');
const { canceledSignup, submittedSignup } = require('../metrics');
module.exports = function(trigger) {
module.exports = function() {
return function(state, emit, close) {
const DAYS = Math.floor(state.LIMITS.MAX_EXPIRE_SECONDS / 86400);
let submitting = false;
@@ -72,7 +71,6 @@ module.exports = function(trigger) {
}
function cancel(event) {
canceledSignup({ trigger });
close(event);
}
@@ -85,7 +83,6 @@ module.exports = function(trigger) {
const el = document.getElementById('email-input');
const email = el.value;
submittedSignup({ trigger });
emit('login', emailish(email) ? email : null);
}
};


@@ -81,21 +81,15 @@ export default class User {
}
get maxSize() {
return this.loggedIn
? this.limits.MAX_FILE_SIZE
: this.limits.ANON.MAX_FILE_SIZE;
return this.limits.MAX_FILE_SIZE;
}
get maxExpireSeconds() {
return this.loggedIn
? this.limits.MAX_EXPIRE_SECONDS
: this.limits.ANON.MAX_EXPIRE_SECONDS;
return this.limits.MAX_EXPIRE_SECONDS;
}
get maxDownloads() {
return this.loggedIn
? this.limits.MAX_DOWNLOADS
: this.limits.ANON.MAX_DOWNLOADS;
return this.limits.MAX_DOWNLOADS;
}
async metricId() {
@@ -109,27 +103,8 @@ export default class User {
async startAuthFlow(trigger, utms = {}) {
this.utms = utms;
this.trigger = trigger;
try {
const params = new URLSearchParams({
entrypoint: `send-${trigger}`,
form_type: 'email',
utm_source: utms.source || 'send',
utm_campaign: utms.campaign || 'none'
});
const res = await fetch(
`${this.authConfig.issuer}/metrics-flow?${params.toString()}`,
{
mode: 'cors'
}
);
const { flowId, flowBeginTime } = await res.json();
this.flowId = flowId;
this.flowBeginTime = flowBeginTime;
} catch (e) {
console.error(e);
this.flowId = null;
this.flowBeginTime = null;
}
this.flowId = null;
this.flowBeginTime = null;
}
async login(email) {


@@ -13,7 +13,7 @@ For Debian/Ubuntu systems this probably just means something like this:
## Building
* We assume an already configured virtual-host on your webserver with an existing empty htdocs folder
* First, remove that htdocs folder - we will replace it with Send's version now
* git clone https://github.com/mozilla/send.git htdocs
* git clone https://github.com/timvisee/send.git htdocs
* Make now sure you are NOT root but rather the user your webserver is serving files under (e.g. "su www-data" or whoever the owner of your htdocs folder is)
* npm install
* npm run build
@@ -40,6 +40,7 @@ Of course, we don't want to expose the service on port 1443. Instead we want our
* a2enmod proxy
* a2enmod proxy_http
* a2enmod proxy_wstunnel
* a2enmod rewrite
In your Apache virtual host configuration file, insert this:


@@ -1,6 +1,6 @@
# Localization
Send is localized in over 50 languages. We use the [fluent](http://projectfluent.org/) library and store our translations in [FTL](http://projectfluent.org/fluent/guide/) files in `public/locales/`. `en-US` is our base language, and other languages are managed by [pontoon](https://pontoon.mozilla.org/projects/test-pilot-firefox-send/).
Send is localized in over 50 languages. We use the [fluent](http://projectfluent.org/) library and store our translations in [FTL](http://projectfluent.org/fluent/guide/) files in `public/locales/`. `en-US` is our base language.
## Process


@@ -1,128 +0,0 @@
# Send V2 Metrics Definitions
## Key Value Prop
Quickly and privately transfer large files from any device to any device.
## Key Business Question to Answer
Is the value proposition of a large encrypted file transfer service enough to drive Firefox Account relationships for non-Firefox users.
## Hypotheses to Test
### Primary - In support of Relationships KPI
We believe that a privacy-respecting file transfer service can drive Firefox Accounts beyond the Firefox Browser.
We will know this to be true when we see 250k Firefox Account creations from non-Firefox contexts w/in six months of launch.
### Secondary - In support of Revenue KPI
We believe that a privacy respecting service accessible beyond the reach of Firefox will provide a valuable platform to research, communicate with, and market to conscious choosers we have traditionally found hard to reach.
We will know this to be true when we can conduct six research tasks (surveys, A/B tests, fake doors, etc) in support of premium services KPIs in the first six months after launch.
## Overview of Key Measures
* Number of people using the service to send and receive files
* Why: measure of service size. Important for understanding addressable market size
* Percent of users who have or create an FxAccount via Send
* Why: representation of % of any service users who might be amenable to an upsell
* % of downloaders who convert into uploaders
* Why: represents a measure of our key growth-loop potential
* Count of uploads and size
* Why: Represents cost of service on a running basis
## Key Funnels
* App Open or Visit `--- DESIRED OUTCOME --->` Successful Upload
* Download UI Visit `--- DESIRED OUTCOME --->` Successful Download
* FxA UI Engagement `--- DESIRED OUTCOME --->` Authenticate
* **STRETCH** App Open or Visit `--- DESIRED OUTCOME --->` Successful Download
## Amplitude Schema
Please see, **See Amplitude HTTP API**(https://amplitude.zendesk.com/hc/en-us/articles/204771828) for HTTP API reference.
## Metric Events
In support of our KPIs we collect events from two separate contexts, server and client. The events are designed to have minimal correlation between contexts.
Server events collect lifecycle information about individual uploads but no user information; also time precision is truncated to hour increments. Client events collect information about how users interact with the UI but no upload identifiers.
### Server Events
Server events allow us to aggregate data about file lifecycle without collecting data about individual users. In this context `user_id` and `user_properties` describe the uploaded archive.
* `session_id` -1 (not part of a session)
* `user_id` hash of (archive_id + owner_id)
* `app_version` package.json version
* `time` timestamp truncated to hour precision
* `country`
* `region`
* `event_type` [server_upload | server_download | server_delete]
* `user_properties`
* `download_limit` set number of downloads
* `time_limit` set expiry duration
* `size` approximate size (log10)
* `anonymous` true if anonymous, false if fxa
* `event_properties`
* `download_count` downloads completed
* `ttl` time remaining before expiry truncated to hour
* `agent` the browser name or first 6 characters of the user agent that made the request
### Client Events
Client events allow us to aggregate data about how the user interface is being used without tracking the lifecycle of individual files. In this context `user_id` and `user_properties` describe the user. The `user_id` and `device_id` change for all users at the beginning of each month.
* `session_id` timestamp
* `user_id` hash of (fxa_id + Date.year + Date.month)
* `device_id` hash of (localStorage random id + Date.year + Date.month)
* `platform` [web | android]
* `country`
* `region`
* `language`
* `time` timestamp
* `os_name`
* `event_type` [client_visit | client_upload | client_download | client_delete | client_login | client_logout]
* `event_properties`
* `browser`
* `browser_version`
* `status` [ ok | error | cancel ]
* Event specific properties (see below)
* `user_properties`
* `active_count` number of active uploads
* `anonymous` true if anonymous, false if fxa
* `experiments` list of experiment ids the user is participating in
* `first_action` how this use came to Send the first time [ upload | download ]
#### Visit Event
* `entrypoint` [ upload | download ]
#### Upload Event
* `download_limit` download limit
* `file_count` number of files
* `password_protected` boolean
* `size` approximate size (log10)
* `time_limit` time limit
* `duration` approximate transfer duration (log10)
#### Download Event
* `password_protected` boolean
* `size` approximate size (log10)
* `duration` approximate transfer duration (log10)
#### Delete Event
* `age` hours since uploaded
* `downloaded` downloaded at least once
#### Login Event
* `trigger` [button | time | count | size]
#### Logout Event
* `trigger` [button | timeout]
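As an illustrative aside to the (now removed) schema above, a hypothetical client event shaped the way these definitions describe; every value below is invented:

// Hypothetical example only; all values are made up for illustration.
const exampleClientEvent = {
  session_id: 1615838400000, // timestamp
  user_id: 'monthly-rotated-hash-of-fxa-id', // placeholder
  device_id: 'monthly-rotated-hash-of-local-id', // placeholder
  platform: 'web',
  language: 'en-US',
  time: 1615838461000,
  event_type: 'client_download',
  event_properties: {
    browser: 'Firefox',
    browser_version: '86.0',
    status: 'ok',
    password_protected: false,
    size: 7, // log10 of size in bytes
    duration: 4 // log10 of transfer duration
  },
  user_properties: {
    active_count: 2,
    anonymous: true,
    first_action: 'download'
  }
};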

package-lock.json (generated): 3260 changes; file diff suppressed because it is too large.


@@ -1,7 +1,7 @@
{
"name": "send",
"description": "File Sharing Experiment",
"version": "3.4.2",
"version": "3.4.10",
"author": "Mozilla (https://mozilla.org)",
"contributors": [
"Tim Visee <3a4fb3964f@sinenomine.email> (https://timvisee.com)"
@@ -30,7 +30,7 @@
"test:report": "nyc report --reporter=html",
"test-integration": "cross-env NODE_ENV=development wdio test/wdio.docker.conf.js",
"circleci-test-integration": "echo 'webdriverio tests need to be updated to node 12'",
"start": "npm run clean && cross-env NODE_ENV=development L10N_DEV=true FXA_CLIENT_ID=fced6b5e3f4c66b9 BASE_URL=http://localhost:8080 webpack-dev-server --mode=development",
"start": "npm run clean && cross-env NODE_ENV=development L10N_DEV=true BASE_URL=http://localhost:8080 DETECT_BASE_URL=true webpack-dev-server --mode=development",
"android": "cross-env ANDROID=1 npm start",
"prod": "node server/bin/prod.js"
},
@@ -61,13 +61,13 @@
"cache": true
},
"engines": {
"node": "^12.16.3"
"node": "^15.5.1"
},
"devDependencies": {
"@babel/core": "^7.12.10",
"@babel/plugin-proposal-class-properties": "^7.12.1",
"@babel/core": "^7.14.0",
"@babel/plugin-proposal-class-properties": "^7.13.0",
"@babel/plugin-syntax-dynamic-import": "^7.2.0",
"@babel/preset-env": "^7.12.11",
"@babel/preset-env": "^7.14.1",
"@dannycoates/webcrypto-liner": "^0.1.37",
"@fullhuman/postcss-purgecss": "^1.3.0",
"@mattiasbuelens/web-streams-polyfill": "0.2.1",
@@ -78,12 +78,12 @@
"base64-js": "^1.5.1",
"content-disposition": "^0.5.3",
"copy-webpack-plugin": "^5.1.2",
"core-js": "^3.8.3",
"core-js": "^3.12.0",
"crc": "^3.8.0",
"cross-env": "^6.0.3",
"css-loader": "^3.6.0",
"css-mqpacker": "^7.0.0",
"cssnano": "^4.1.10",
"cssnano": "^4.1.11",
"eslint": "^6.6.0",
"eslint-config-prettier": "^6.15.0",
"eslint-plugin-mocha": "^6.2.1",
@@ -102,7 +102,7 @@
"lint-staged": "^9.4.2",
"mocha": "^6.2.2",
"morgan": "^1.9.1",
"nanobus": "^4.4.0",
"nanobus": "^4.5.0",
"nanohtml": "^1.9.0",
"nanotiming": "^7.3.1",
"npm-run-all": "^4.1.5",
@@ -113,16 +113,15 @@
"proxyquire": "^2.1.3",
"puppeteer": "^2.0.0",
"raw-loader": "^3.1.0",
"redis-mock": "^0.47.0",
"rimraf": "^3.0.0",
"script-loader": "^0.7.2",
"sinon": "^7.5.0",
"string-hash": "^1.1.3",
"stylelint": "^13.9.0",
"stylelint": "^13.13.1",
"stylelint-config-standard": "^19.0.0",
"stylelint-no-unsupported-browser-features": "^4.1.4",
"svgo": "^1.3.2",
"svgo-loader": "^2.2.1",
"svgo-loader": "^2.2.2",
"tailwindcss": "^1.9.6",
"val-loader": "^1.1.1",
"webpack": "4.38.0",
@@ -136,9 +135,9 @@
"@dannycoates/express-ws": "^5.0.3",
"@fluent/bundle": "^0.13.0",
"@fluent/langneg": "^0.3.0",
"@google-cloud/storage": "^5.7.3",
"@google-cloud/storage": "^5.8.5",
"@sentry/node": "^5.30.0",
"aws-sdk": "^2.831.0",
"aws-sdk": "^2.902.0",
"body-parser": "^1.19.0",
"choo": "^7.0.0",
"cldr-core": "^35.1.0",
@@ -150,8 +149,9 @@
"mozlog": "^2.2.0",
"node-fetch": "^2.6.1",
"redis": "^2.8.0",
"redis-mock": "^0.47.0",
"selenium-standalone": "^6.23.0",
"ua-parser-js": "^0.7.23"
"ua-parser-js": "^0.7.28"
},
"availableLanguages": [
"en-US",


@@ -28,7 +28,7 @@ notSupportedOutdatedDetail = Helaas ondersteunt deze versie van Firefox de webte
updateFirefox = Firefox bijwerken
deletePopupCancel = Annuleren
deleteButtonHover = Verwijderen
footerText = Niet aangesloten aan Mozilla of Firefox.
footerText = Niet gelieerd aan Mozilla of Firefox.
footerLinkDonate = Doneren
footerLinkCli = CLI
footerLinkDmca = DMCA
@@ -52,7 +52,7 @@ passwordSetError = Dit wachtwoord kon niet worden ingesteld
-send-short-brand = Send
-firefox = Firefox
-mozilla = Mozilla
introTitle = Eenvoudig, privé bestanden delen
introTitle = Bestanden delen, eenvoudig en privé
introDescription = Met { -send-brand } kunt u bestanden delen met end-to-endversleuteling en een koppeling die automatisch verloopt. Hierdoor kunt u privé houden wat u wilt delen en er zeker van zijn dat uw zaken niet voor altijd online blijven.
notifyUploadEncryptDone = Uw bestand is versleuteld en klaar voor verzending
# downloadCount is from the downloadCount string and timespan is a timespanMinutes string. ex. 'Expires after 2 downloads or 25 minutes'


@@ -1,171 +0,0 @@
const crypto = require('crypto');
const fetch = require('node-fetch');
const config = require('./config');
const pkg = require('../package.json');
const HOUR = 1000 * 60 * 60;
function truncateToHour(timestamp) {
return Math.floor(timestamp / HOUR) * HOUR;
}
function orderOfMagnitude(n) {
return Math.floor(Math.log10(n));
}
function userId(fileId, ownerId) {
const hash = crypto.createHash('sha256');
hash.update(fileId);
hash.update(ownerId);
return hash.digest('hex').substring(32);
}
function statUploadEvent(data) {
const event = {
session_id: -1,
country: data.country,
region: data.state,
user_id: userId(data.id, data.owner),
app_version: pkg.version,
time: truncateToHour(Date.now()),
event_type: 'server_upload',
user_properties: {
download_limit: data.dlimit,
time_limit: data.timeLimit,
size: orderOfMagnitude(data.size),
anonymous: data.anonymous
},
event_properties: {
agent: data.agent
},
event_id: 0
};
return sendBatch([event]);
}
function statDownloadEvent(data) {
const event = {
session_id: -1,
country: data.country,
region: data.state,
user_id: userId(data.id, data.owner),
app_version: pkg.version,
time: truncateToHour(Date.now()),
event_type: 'server_download',
event_properties: {
agent: data.agent,
download_count: data.download_count,
ttl: data.ttl
},
event_id: data.download_count
};
return sendBatch([event]);
}
function statDeleteEvent(data) {
const event = {
session_id: -1,
country: data.country,
region: data.state,
user_id: userId(data.id, data.owner),
app_version: pkg.version,
time: truncateToHour(Date.now()),
event_type: 'server_delete',
event_properties: {
agent: data.agent,
download_count: data.download_count,
ttl: data.ttl
},
event_id: data.download_count + 1
};
return sendBatch([event]);
}
function clientEvent(
event,
ua,
language,
session_id,
deltaT,
platform,
country,
state
) {
const ep = event.event_properties || {};
const up = event.user_properties || {};
const event_properties = {
browser: ua.browser.name,
browser_version: ua.browser.version,
status: ep.status,
age: ep.age,
downloaded: ep.downloaded,
download_limit: ep.download_limit,
duration: ep.duration,
entrypoint: ep.entrypoint,
file_count: ep.file_count,
password_protected: ep.password_protected,
referrer: ep.referrer,
size: ep.size,
time_limit: ep.time_limit,
trigger: ep.trigger,
ttl: ep.ttl,
utm_campaign: ep.utm_campaign,
utm_content: ep.utm_content,
utm_medium: ep.utm_medium,
utm_source: ep.utm_source,
utm_term: ep.utm_term,
experiment: ep.experiment,
variant: ep.variant
};
const user_properties = {
active_count: up.active_count,
anonymous: up.anonymous,
experiments: up.experiments,
first_action: up.first_action
};
return {
app_version: pkg.version,
country: country,
device_id: event.device_id,
event_properties,
event_type: event.event_type,
language,
os_name: ua.os.name,
os_version: ua.os.version,
platform,
region: state,
session_id,
time: event.time + deltaT,
user_id: event.user_id,
user_properties
};
}
async function sendBatch(events, timeout = 1000) {
if (!config.amplitude_id) {
return 200;
}
try {
const result = await fetch('https://api.amplitude.com/batch', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
api_key: config.amplitude_id,
events
}),
timeout
});
return result.status;
} catch (e) {
return 500;
}
}
module.exports = {
statUploadEvent,
statDownloadEvent,
statDeleteEvent,
clientEvent,
sendBatch
};


@@ -2,11 +2,6 @@ const config = require('./config');
module.exports = {
LIMITS: {
ANON: {
MAX_FILE_SIZE: config.anon_max_file_size,
MAX_DOWNLOADS: config.anon_max_downloads,
MAX_EXPIRE_SECONDS: config.anon_max_expire_seconds
},
MAX_FILE_SIZE: config.max_file_size,
MAX_DOWNLOADS: config.max_downloads,
MAX_EXPIRE_SECONDS: config.max_expire_seconds,


@@ -39,11 +39,6 @@ const conf = convict({
default: 86400 * 7,
env: 'MAX_EXPIRE_SECONDS'
},
anon_max_expire_seconds: {
format: Number,
default: 86400,
env: 'ANON_MAX_EXPIRE_SECONDS'
},
download_counts: {
format: Array,
default: [1, 2, 3, 4, 5, 20, 50, 100],
@@ -54,11 +49,6 @@ const conf = convict({
default: 100,
env: 'MAX_DOWNLOADS'
},
anon_max_downloads: {
format: Number,
default: 5,
env: 'ANON_MAX_DOWNLOADS'
},
max_files_per_archive: {
format: Number,
default: 64,
@@ -74,6 +64,16 @@ const conf = convict({
default: 'localhost',
env: 'REDIS_HOST'
},
redis_port: {
format: Number,
default: 6379,
env: 'REDIS_PORT'
},
redis_password: {
format: String,
default: '',
env: 'REDIS_PASSWORD'
},
redis_event_expire: {
format: Boolean,
default: false,
@@ -100,16 +100,6 @@ const conf = convict({
arg: 'port',
env: 'PORT'
},
amplitude_id: {
format: String,
default: '',
env: 'AMPLITUDE_ID'
},
analytics_id: {
format: String,
default: '',
env: 'GOOGLE_ANALYTICS_ID'
},
sentry_id: {
format: String,
default: '',
@@ -130,11 +120,6 @@ const conf = convict({
default: 1024 * 1024 * 1024 * 2.5,
env: 'MAX_FILE_SIZE'
},
anon_max_file_size: {
format: Number,
default: 1024 * 1024 * 1024,
env: 'ANON_MAX_FILE_SIZE'
},
l10n_dev: {
format: Boolean,
default: false,
@@ -145,6 +130,11 @@ const conf = convict({
default: 'https://send.firefox.com',
env: 'BASE_URL'
},
detect_base_url: {
format: Boolean,
default: false,
env: 'DETECT_BASE_URL'
},
file_dir: {
format: 'String',
default: `${tmpdir()}${path.sep}send-${randomBytes(4).toString('hex')}`,
@@ -213,7 +203,7 @@ const conf = convict({
footer_source_url: {
format: String,
default: 'https://github.com/timvisee/send',
env: 'SEND_FOOTER_CLI_URL'
env: 'SEND_FOOTER_SOURCE_URL'
}
});
@@ -221,4 +211,17 @@ const conf = convict({
conf.validate({ allowed: 'strict' });
const props = conf.getProperties();
module.exports = props;
const deriveBaseUrl = req => {
if (!props.detect_base_url) {
return props.base_url;
}
const protocol = req.secure ? 'https://' : 'http://';
return `${protocol}${req.headers.host}`;
};
module.exports = {
...props,
deriveBaseUrl
};


@@ -1,23 +1,10 @@
const storage = require('../storage');
const { statDeleteEvent } = require('../amplitude');
module.exports = async function(req, res) {
try {
const id = req.params.id;
const meta = req.meta;
const ttl = await storage.ttl(id);
await storage.del(id);
res.sendStatus(200);
statDeleteEvent({
id,
ip: req.ip,
country: req.geo.country,
state: req.geo.state,
owner: meta.owner,
download_count: meta.dl,
ttl,
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
} catch (e) {
res.sendStatus(404);
}


@@ -1,7 +1,6 @@
const storage = require('../storage');
const mozlog = require('../log');
const log = mozlog('send.download');
const { statDownloadEvent } = require('../amplitude');
module.exports = async function(req, res) {
const id = req.params.id;
@@ -27,17 +26,6 @@ module.exports = async function(req, res) {
const dl = meta.dl + 1;
const dlimit = meta.dlimit;
const ttl = await storage.ttl(id);
statDownloadEvent({
id,
ip: req.ip,
country: req.geo.country,
state: req.geo.state,
owner: meta.owner,
download_count: dl,
ttl,
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
try {
if (dl >= dlimit) {
await storage.del(id);


@@ -36,9 +36,14 @@ module.exports = function(app) {
defaultSrc: ["'self'"],
connectSrc: [
"'self'",
config.base_url.replace(/^https:\/\//, 'wss://')
function(req) {
const baseUrl = config.deriveBaseUrl(req);
const r = baseUrl.replace(/^http(s?):\/\//, 'ws$1://');
console.log([baseUrl, r]);
return r;
}
],
imgSrc: ["'self'"],
imgSrc: ["'self'", 'data:'],
scriptSrc: [
"'self'",
function(req) {
@@ -52,10 +57,6 @@ module.exports = function(app) {
}
};
csp.directives.connectSrc.push(
config.base_url.replace(/^https:\/\//, 'wss://')
);
app.use(helmet.contentSecurityPolicy(csp));
}
@@ -112,7 +113,6 @@ module.exports = function(app) {
require('./params')
);
app.post(`/api/info/:id${ID_REGEX}`, auth.owner, require('./info'));
app.post('/api/metrics', require('./metrics'));
app.get('/__version__', function(req, res) {
// eslint-disable-next-line node/no-missing-require
res.sendFile(require.resolve('../../dist/version.json'));


@@ -1,24 +0,0 @@
const { sendBatch, clientEvent } = require('../amplitude');
module.exports = async function(req, res) {
try {
const data = JSON.parse(req.body); // see http://crbug.com/490015
const deltaT = Date.now() - data.now;
const events = data.events.map(e =>
clientEvent(
e,
req.ua,
data.lang,
data.session_id + deltaT,
deltaT,
data.platform,
req.geo.country,
req.geo.state
)
);
const status = await sendBatch(events);
res.sendStatus(status);
} catch (e) {
res.sendStatus(500);
}
};


@@ -2,7 +2,7 @@ const config = require('../config');
const storage = require('../storage');
module.exports = function(req, res) {
const max = req.user ? config.max_downloads : config.anon_max_downloads;
const max = config.max_downloads;
const dlimit = req.body.dlimit;
if (!dlimit || dlimit > max) {
return res.sendStatus(400);


@@ -28,8 +28,7 @@ module.exports = async function(req, res) {
//this hasn't been updated to expiration time setting yet
//if you want to fallback to this code add this
await storage.set(newId, fileStream, meta, config.default_expire_seconds);
const protocol = config.env === 'production' ? 'https' : req.protocol;
const url = `${protocol}://${req.get('host')}/download/${newId}/`;
const url = `${config.deriveBaseUrl(req)}/download/${newId}/`;
res.set('WWW-Authenticate', `send-v1 ${meta.nonce}`);
res.json({
url,


@@ -4,7 +4,6 @@ const config = require('../config');
const mozlog = require('../log');
const Limiter = require('../limiter');
const fxa = require('../fxa');
const { statUploadEvent } = require('../amplitude');
const { encryptedSize } = require('../../app/utils');
const { Transform } = require('stream');
@@ -31,15 +30,9 @@ module.exports = function(ws, req) {
const metadata = fileInfo.fileMetadata;
const auth = fileInfo.authorization;
const user = await fxa.verify(fileInfo.bearer);
const maxFileSize = user
? config.max_file_size
: config.anon_max_file_size;
const maxExpireSeconds = user
? config.max_expire_seconds
: config.anon_max_expire_seconds;
const maxDownloads = user
? config.max_downloads
: config.anon_max_downloads;
const maxFileSize = config.max_file_size;
const maxExpireSeconds = config.max_expire_seconds;
const maxDownloads = config.max_downloads;
if (config.fxa_required && !user) {
ws.send(
@@ -72,8 +65,7 @@ module.exports = function(ws, req) {
nonce: crypto.randomBytes(16).toString('base64')
};
const protocol = config.env === 'production' ? 'https' : req.protocol;
const url = `${protocol}://${req.get('host')}/download/${newId}/`;
const url = `${config.deriveBaseUrl(req)}/download/${newId}/`;
ws.send(
JSON.stringify({
@@ -108,18 +100,6 @@ module.exports = function(ws, req) {
// in order to avoid having to check socket state and clean
// up storage, possibly with an exception that we can catch.
ws.send(JSON.stringify({ ok: true }));
statUploadEvent({
id: newId,
ip: req.ip,
country: req.geo.country,
state: req.geo.state,
owner,
dlimit,
timeLimit,
anonymous: !user,
size: limiter.length,
agent: req.ua.browser.name || req.ua.ua.substring(0, 6)
});
}
} catch (e) {
log.error('upload', e);


@@ -23,6 +23,7 @@ module.exports = async function(req) {
if (config.survey_url) {
prefs.surveyUrl = config.survey_url;
}
const baseUrl = config.deriveBaseUrl(req);
return {
archive: {
numFiles: 0
@@ -33,7 +34,7 @@ module.exports = async function(req) {
title: 'Send',
description:
'Encrypt and send files with a link that automatically expires to ensure your important documents dont stay online forever.',
baseUrl: config.base_url,
baseUrl,
ui: {},
storage: {
files: []


@@ -8,8 +8,10 @@ module.exports = function(config) {
//eslint-disable-next-line security/detect-non-literal-require
const redis = require(redis_lib);
const client = redis.createClient({
var client_config = {
host: config.redis_host,
port: config.redis_port,
retry_strategy: options => {
if (options.total_retry_time > config.redis_retry_time) {
client.emit('error', 'Retry time exhausted');
@@ -18,7 +20,10 @@ module.exports = function(config) {
return config.redis_retry_delay;
}
});
};
if (config.redis_password != null && config.redis_password.length > 0)
client_config.password = config.redis_password;
const client = redis.createClient(client_config);
client.ttlAsync = promisify(client.ttl);
client.hgetallAsync = promisify(client.hgetall);