11 Commits

Author | SHA1 | Message | Date
Lukas F. Hartmann | 84d0be50f0 | update vulnerable moment package | 2018-03-30 22:42:16 +02:00
Lukas F. Hartmann | 6d2d2310b6 | basic import functionality; dockerfile fixes; session and cookie handling fixes | 2018-03-30 22:34:27 +02:00
mntmn | 462e9edaab | first importer WIP | 2018-01-08 15:57:59 +01:00
Lukas F. Hartmann | 820203625c | don't default to app volume mount; create s3 bucket on boot; fix revAll gulp step | 2017-04-07 18:01:23 +02:00
Martin Guether | 03059b67f1 | add gulp stuff to build process | 2017-04-07 15:34:33 +02:00
Martin Guether | 1426bc9c24 | updated storage config | 2017-04-07 13:07:09 +02:00
Martin Guether | bd0471dad6 | added docker ignore | 2017-04-07 12:03:58 +02:00
Martin Guether | bbdcb2b2fe | Merge remote-tracking branch 'origin/master' into docker (conflicts: README.md) | 2017-04-07 12:00:28 +02:00
Martin Guether | af5335025f | fixed crash on console | 2017-04-07 11:58:29 +02:00
Martin Guether | f9cf8ba7e8 | update ports and variables for docker compose bootstrap | 2017-04-07 11:55:07 +02:00
Martin Guether | a3e2129b79 | added experiemental docker support | 2017-04-07 10:50:48 +02:00
21 changed files with 327 additions and 72 deletions

.dockerignore (new file, 14 lines added)

@@ -0,0 +1,14 @@
.DS_Store
.git
logs
*.log
scripts
pids
*.pid
*.seed
lib-cov
coverage
.grunt
.lock-wscript
build/Release
node_modules

Dockerfile (new file, 29 lines added)

@@ -0,0 +1,29 @@
FROM spacedeck/docker-baseimage:latest
ENV NODE_ENV production
RUN mkdir -p /usr/src/app
WORKDIR /usr/src/app
COPY package.json /usr/src/app/
RUN npm install
RUN npm install gulp-rev-replace gulp-clean gulp-fingerprint gulp-rev gulp-rev-all gulp-rev-replace
RUN npm install -g --save-dev gulp
COPY app.js Dockerfile Gulpfile.js LICENSE /usr/src/app/
COPY config /usr/src/app/config
COPY helpers /usr/src/app/helpers
COPY locales /usr/src/app/locales
COPY middlewares /usr/src/app/middlewares
COPY models /usr/src/app/models
COPY public /usr/src/app/public
COPY routes /usr/src/app/routes
COPY styles /usr/src/app/styles
COPY views /usr/src/app/views
RUN gulp all
RUN npm cache clean
CMD [ "node", "app.js" ]
EXPOSE 9666


@@ -12,10 +12,9 @@ var uglify = require('gulp-uglify');
 var fingerprint = require('gulp-fingerprint');
 var rev = require('gulp-rev');
-var RevAll = require('gulp-rev-all');
+var revAll = require('gulp-rev-all');
 gulp.task('rev', () => {
-  var revAll = new RevAll();
   return gulp.src(['public/**'])
     .pipe(gulp.dest('build/assets'))
     .pipe(revAll.revision())


@@ -23,10 +23,10 @@ We appreciate filed issues, pull requests and general discussion.
 Spacedeck uses the following major building blocks:
+- Vue.js (Frontend)
 - Node.js 7.x (Backend / API)
 - MongoDB 3.x (Datastore)
 - Redis 3.x (Datastore for realtime channels)
-- Vue.js (Frontend)
 It also has some binary dependencies for media conversion and PDF export:
@@ -50,8 +50,14 @@ see: config/config.json
 export NODE_ENV=development
 npm start
-open http://localhost:9000
+open http://localhost:9666
+
+# experimental docker(compose) support
+
+We have a docker base image at https://github.com/spacedeck/docker-baseimage that includes all required binaries. Based on this image we can use Docker Compose to bootstrap a Spacedeck including data storage.
+
+docker-compose build
+docker-compose run -e ENV=development -p 9666:9666 -e NODE_ENV=development spacedeck-open
 # License

app.js (7 changed lines)

@@ -50,7 +50,7 @@ swig.setFilter('cdn', function(input, idx) {
 app.engine('html', swig.renderFile);
 app.set('view engine', 'html');
-if (app.get('env') != 'development') {
+if (isProduction) {
   app.set('views', path.join(__dirname, 'build', 'views'));
   app.use(favicon(path.join(__dirname, 'build', 'assets', 'images', 'favicon.png')));
   app.use(express.static(path.join(__dirname, 'build', 'assets')));
@@ -84,7 +84,6 @@ app.use(helmet.noSniff())
app.use(require("./middlewares/templates")); app.use(require("./middlewares/templates"));
app.use(require("./middlewares/error_helpers")); app.use(require("./middlewares/error_helpers"));
app.use(require("./middlewares/setuser")); app.use(require("./middlewares/setuser"));
app.use(require("./middlewares/subdomain"));
app.use(require("./middlewares/cors")); app.use(require("./middlewares/cors"));
app.use(require("./middlewares/i18n")); app.use(require("./middlewares/i18n"));
app.use("/api", require("./middlewares/api_helpers")); app.use("/api", require("./middlewares/api_helpers"));
@@ -129,11 +128,11 @@ if (app.get('env') == 'development') {
 module.exports = app;
 // CONNECT TO DATABASE
-const mongoHost = process.env.MONGO_PORT_27017_TCP_ADDR || 'localhost';
+const mongoHost = process.env.MONGO_PORT_27017_TCP_ADDR || config.get('mongodb_host');
 mongoose.connect('mongodb://' + mongoHost + '/spacedeck');
 // START WEBSERVER
-const port = 9000;
+const port = 9666;
 const server = http.Server(app).listen(port, () => {


@@ -1,9 +1,20 @@
 {
-  "endpoint": "http://localhost:9000",
+  //"endpoint": "http://localhost:9000",
+  "endpoint": "http://localhost:9666",
   "storage_region": "eu-central-1",
+  //"storage_bucket": "sdeck-development",
+  //"storage_cdn": "http://localhost:9123/sdeck-development",
+  //"storage_endpoint": "http://storage:9000",
   "storage_bucket": "my_spacedeck_bucket",
   "storage_cdn": "/storage",
   "storage_local_path": "./storage",
+  "redis_mock": true,
+  "mongodb_host": "localhost",
+  "redis_host": "localhost",
   "google_access" : "",
   "google_secret" : "",
   "admin_pass": "very_secret_admin_password",

docker-compose.yml (new file, 34 lines added)

@@ -0,0 +1,34 @@
version: '2'
services:
  sync:
    image: redis
  storage:
    image: minio/minio
    environment:
      - MINIO_ACCESS_KEY=AKIAIOSFODNN7EXAMPLE
      - MINIO_SECRET_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
    ports:
      - 9123:9000
    command: server /export
  db:
    image: mongo
  spacedeck-open:
    environment:
      - env=development
      - MINIO_ACCESS_KEY=AKIAIOSFODNN7EXAMPLE
      - MINIO_SECRET_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
    build: .
    volumes:
      # - ./:/usr/src/app
      - /usr/src/app/node_modules
    command: npm start
    ports:
      - 9666:9666
    depends_on:
      - db
      - sync
      - storage
    links:
      - storage
      - db
      - sync

helpers/importer.js (new file, 103 lines added)

@@ -0,0 +1,103 @@
'use strict';

const extract = require('extract-zip')
const config = require('config')
const fs = require('fs')
const path = require('path')

require('../models/schema')

module.exports = {
  importZIP: function(user, zipPath) {
    // 1. extract zip to local storage folder
    // 2. read spaces.json from this folder
    // 3. iterate through spaces and read all their artifact jsons
    // 4. fixup storage paths
    // 5. replace creator id by user._id

    let relativeImportDir = 'import_'+user._id
    let importDir = path.resolve(config.get('storage_local_path')+'/'+config.get('storage_bucket')+'/'+relativeImportDir)

    if (!fs.existsSync(importDir)) {
      fs.mkdirSync(importDir)
    }

    extract(zipPath, {dir: importDir}, function(err) {
      if (err) {
        console.log(err)
        return
      }
      console.log('[import] extracted to',importDir)

      let spacesJson = fs.readFileSync(importDir+'/spaces.json')
      let spaces = JSON.parse(spacesJson)
      var homeFolderId = null

      console.log('[import] spaces:',spaces.length)

      // pass 1: find homefolder
      for (var i=0; i<spaces.length; i++) {
        let space = spaces[i]
        if (!space.parent_space_id) {
          homeFolderId = space._id
          break
        }
      }
      console.log("[import] homeFolderId:",homeFolderId)

      for (var i=0; i<spaces.length; i++) {
        let space = spaces[i]
        if (space.parent_space_id) {
          let artifacts = JSON.parse(fs.readFileSync(importDir+'/'+space._id+'_artifacts.json'))
          console.log('[import] space',space._id,'artifacts:',artifacts.length)

          let q = {_id: space._id}
          space.creator = user._id
          delete space.__v

          // transplant homefolder
          console.log("parent:",space.parent_space_id)
          if (space.parent_space_id+"" == homeFolderId+"") {
            space.parent_space_id = user.home_folder_id
          }

          Space.findOneAndUpdate(q, space, {upsert: true}, function(err,res) {
            if (err) console.log("[import] space upsert err:",err)
          })

          for (var j=0; j<artifacts.length; j++) {
            let a = artifacts[j]
            let q = {_id: a._id}
            a.creator = user._id
            delete a.__v
            delete a.payload_thumbnail_big_uri

            let prefix = "/storage/"+relativeImportDir+"/"+space._id+"_files/"

            if (a.thumbnail_uri && a.thumbnail_uri[0]!='/') a.thumbnail_uri = prefix + a.thumbnail_uri
            if (a.payload_uri && a.payload_uri[0]!='/') a.payload_uri = prefix + a.payload_uri
            if (a.payload_thumbnail_web_uri && a.payload_thumbnail_web_uri[0]!='/') a.payload_thumbnail_web_uri = prefix + a.payload_thumbnail_web_uri
            if (a.payload_thumbnail_medium_uri && a.payload_thumbnail_medium_uri[0]!='/') a.payload_thumbnail_medium_uri = prefix + a.payload_thumbnail_medium_uri

            if (a.payload_alternatives) {
              for (var k=0; k<a.payload_alternatives.length; k++) {
                let alt = a.payload_alternatives[k]
                if (alt.payload_uri && alt.payload_uri[0]!='/') alt.payload_uri = prefix + alt.payload_uri
                if (alt.payload_thumbnail_web_uri && alt.payload_thumbnail_web_uri[0]!='/') alt.payload_thumbnail_web_uri = prefix + alt.payload_thumbnail_web_uri
                if (alt.payload_thumbnail_medium_uri && alt.payload_thumbnail_medium_uri[0]!='/') alt.payload_thumbnail_medium_uri = prefix + alt.payload_thumbnail_medium_uri
              }
            }

            Artifact.findOneAndUpdate(q, a, {upsert: true}, function(err,res) {
              if (err) console.log("[import] artifact upsert err:",err)
            })
          }
        }
      }
    })
  }
}
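
For orientation, this helper is driven by the new HTTP route added to the users API router near the end of this changeset. Below is a condensed sketch of that wiring with explanatory comments; the router's mount point is not shown in this diff and is assumed, and the archive layout noted in the comments is inferred from the reads in importZIP above.

// Sketch only: mirrors the route added in routes/api/users.js later in this diff.
// The archive passed via ?zip= must contain spaces.json plus, per space,
// <space_id>_artifacts.json and a <space_id>_files/ directory, since importZIP
// reads exactly those paths after extraction.
var express = require('express');
var importer = require('../../helpers/importer');
var router = express.Router();

router.get('/:user_id/import', function(req, res, next) {
  if (req.query.zip) {
    res.send("importing");                        // reply immediately...
    importer.importZIP(req.user, req.query.zip);  // ...import runs asynchronously
  } else {
    res.sendStatus(400);                          // a ?zip=<path> query parameter is required
  }
});

module.exports = router;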


@@ -53,7 +53,7 @@ module.exports = {
       }
     }
   }, function(err, data) {
-    if(err) console.log('Email not sent:', err);
+    if (err) console.error("Error sending email:", err);
     else console.log("Email sent.");
   });
 }


@@ -32,31 +32,36 @@ module.exports = {
 };
 phantom.create({ path: require('phantomjs-prebuilt').path }, function (err, browser) {
-  return browser.createPage(function (err, page) {
-    console.log("page created, opening ",space_url);
+  if(err){
+    console.log(err);
+  }else{
+    return browser.createPage(function (err, page) {
+      console.log("page created, opening ",space_url);
       if (type=="pdf") {
         var psz = {
           width: space.advanced.width+"px",
           height: space.advanced.height+"px"
         };
         page.set('paperSize', psz);
       }
       page.set('settings.resourceTimeout',timeout);
       page.set('settings.javascriptEnabled',false);
       return page.open(space_url, function (err,status) {
         page.render(export_path, function() {
           on_success_called = true;
           if (on_success) {
             on_success(export_path);
           }
           page.close();
           browser.exit();
+        });
         });
       });
-  });
+  }
 }, {
   onExit: on_exit
 });


@@ -1,5 +1,7 @@
 'use strict';
+const config = require('config');
 // this is a mock version of the Redis API,
 // emulating Redis if it is not available locally
 var notRedis = {
@@ -92,7 +94,12 @@ var notRedis = {
 module.exports = {
   connectRedis: function() {
-    this.connection = notRedis;
+    if (config.get("redis_mock")) {
+      this.connection = notRedis;
+    } else {
+      const redisHost = process.env.REDIS_PORT_6379_TCP_ADDR || 'sync';
+      this.connection = new RedisConnection(6379, redisHost);
+    }
   },
   getConnection: function() {
     this.connectRedis();


@@ -2,21 +2,40 @@
 var fs = require('fs');
 var config = require('config');
+var s3 = null;
 // use AWS S3 or local folder depending on config
 if (config.get("storage_local_path")) {
   var AWS = require('mock-aws-s3');
   AWS.config.basePath = config.get("storage_local_path");
+  s3 = new AWS.S3();
 } else {
   var AWS = require('aws-sdk');
-  AWS.config.region = config.get("storage_region");
+  var storage_endpoint = config.get("storage_endpoint");
+  const ep = new AWS.Endpoint(storage_endpoint);
+  AWS.config.update(new AWS.Config({
+    accessKeyId: process.env.MINIO_ACCESS_KEY,
+    secretAccessKey: process.env.MINIO_SECRET_KEY,
+    region: config.get("storage_region"),
+    s3ForcePathStyle: true,
+    signatureVersion: 'v4'
+  }));
+  s3 = new AWS.S3({
+    endpoint: ep
+  });
 }
+s3.createBucket({
+  Bucket: config.get("storage_bucket"),
+  ACL: "public-read",
+  GrantRead: "*"
+}, (err,res) => {
+  console.log("createBucket",err,res);
+});
 module.exports = {
   removeFile: (path, callback) => {
-    const s3 = new AWS.S3({
-      region: config.get("storage_region")
-    });
     const bucket = config.get("storage_bucket");
     s3.deleteObject({
       Bucket: bucket, Key: path
@@ -34,7 +53,7 @@ module.exports = {
       callback({error:"missing path"}, null);
       return;
     }
-    console.log("[s3] uploading", localFilePath, " to ", fileName);
+    console.log("[storage] uploading", localFilePath, " to ", fileName);
     const bucket = config.get("storage_bucket");
     const fileStream = fs.createReadStream(localFilePath);
@@ -45,10 +64,6 @@ module.exports = {
       }
     });
     fileStream.on('open', function () {
-      var s3 = new AWS.S3({
-        region: config.get("storage_region")
-      });
       s3.putObject({
         Bucket: bucket,
         Key: fileName,
@@ -58,7 +73,7 @@ module.exports = {
         if (err){
           console.error(err);
           callback(err);
-        }else {
+        } else {
           const url = config.get("storage_cdn") + "/" + fileName;
           console.log("[s3]" + localFilePath + " to " + url);
           callback(null, url);


@@ -1,21 +1,28 @@
 'use strict';
 require('../models/schema');
+const config = require('config');
 const WebSocketServer = require('ws').Server;
+const RedisConnection = require('ioredis');
 const async = require('async');
 const _ = require("underscore");
 const mongoose = require("mongoose");
 const crypto = require('crypto');
-var redis = require("./redis.js");
+const redisMock = require("./redis.js");
 module.exports = {
   startWebsockets: function(server) {
     this.setupSubscription();
-    this.state = redis.getConnection();
-    if(!this.current_websockets) {
+    if (!this.current_websockets) {
+      if (config.get("redis_mock")) {
+        this.state = redisMock.getConnection();
+      } else {
+        this.state = new RedisConnection(6379, process.env.REDIS_PORT_6379_TCP_ADDR || config.get("redis_host"));
+      }
       this.current_websockets = [];
     }
@@ -118,9 +125,17 @@ module.exports = {
   },
   setupSubscription: function() {
-    this.cursorSubscriber = redis.getConnection().subscribe(['cursors', 'users', 'updates'], function (err, count) {
-      console.log("[redis] websockets to " + count + " topics." );
-    });
+    if (config.get("redis_mock")) {
+      this.cursorSubscriber = redisMock.getConnection().subscribe(['cursors', 'users', 'updates'], function (err, count) {
+        console.log("[redis-mock] websockets subscribed to " + count + " topics." );
+      });
+    } else {
+      this.cursorSubscriber = new RedisConnection(6379, process.env.REDIS_PORT_6379_TCP_ADDR || config.get("redis_host"));
+      this.cursorSubscriber.subscribe(['cursors', 'users', 'updates'], function (err, count) {
+        console.log("[redis] websockets subscribed to " + count + " topics." );
+      });
+    }
     this.cursorSubscriber.on('message', function (channel, rawMessage) {
       const msg = JSON.parse(rawMessage);
       const spaceId = msg.space_id;


@@ -5,27 +5,24 @@ var config = require('config');
 module.exports = (req, res, next) => {
   const token = req.cookies["sdsession"];
   if (token && token != "null" && token !== null) {
     User.findOne({
       "sessions.token": token
     }).populate('team').exec((err, user) => {
+      if (err) console.error("session.token lookup error:",err);
       if (!user) {
-        // FIXME
-        var domain = "localhost";
-        res.clearCookie('sdsession', {
-          domain: domain
-        });
+        res.clearCookie('sdsession');
         if (req.accepts("text/html")) {
-          res.redirect("/");
+          res.send("Please clear your cookies and try again.");
         } else if (req.accepts('application/json')) {
           res.status(403).json({
             "error": "token_not_found"
           });
         } else {
-          res.redirect("/");
+          res.send("Please clear your cookies and try again.");
         }
       } else {
         req["token"] = token;
         req["user"] = user;


@@ -45,7 +45,7 @@ module.exports.teamSchema.index({
 module.exports.teamSchema.statics.getTeamForHost = (host, cb) => {
-  if (host != "127.0.0.1:9000") { //phantomjs check
+  if (host != "127.0.0.1:9666") { //phantomjs check
     let subDomainParts = host.split('.');
     if (subDomainParts.length > 2) {


@@ -23,6 +23,7 @@
"debug": "~2.6.3", "debug": "~2.6.3",
"execSync": "latest", "execSync": "latest",
"express": "~4.13.0", "express": "~4.13.0",
"extract-zip": "^1.6.6",
"glob": "7.1.1", "glob": "7.1.1",
"gm": "1.23.0", "gm": "1.23.0",
"googleapis": "18.0.0", "googleapis": "18.0.0",
@@ -40,7 +41,7 @@
"log-timestamp": "latest", "log-timestamp": "latest",
"md5": "2.2.1", "md5": "2.2.1",
"mock-aws-s3": "^2.6.0", "mock-aws-s3": "^2.6.0",
"moment": "2.18.1", "moment": "^2.19.3",
"mongoose": "4.9.3", "mongoose": "4.9.3",
"morgan": "1.8.1", "morgan": "1.8.1",
"node-phantom-simple": "2.2.4", "node-phantom-simple": "2.2.4",


@@ -5,6 +5,7 @@ require('../../models/schema');
 var bcrypt = require('bcryptjs');
 var crypo = require('crypto');
+var URL = require('url').URL;
 var express = require('express');
 var router = express.Router();
@@ -40,11 +41,11 @@ router.post('/', function(req, res) {
     user.sessions.push(session);
     user.save(function(err, result) {
-      // FIXME
-      var secure = process.env.NODE_ENV == "production" || process.env.NODE_ENV == "staging";
-      var domain = (process.env.NODE_ENV == "production") ? ".example.org" : "localhost";
-      res.cookie('sdsession', token, { domain: domain, httpOnly: true, secure: secure});
+      if (err) console.error("Error saving user:",err);
+      var domain = (process.env.NODE_ENV == "production") ? new URL(config.get('endpoint')).hostname : "localhost";
+      res.cookie('sdsession', token, { domain: domain, httpOnly: true });
       res.status(201).json(session);
     });
   });
@@ -69,8 +70,7 @@ router.delete('/current', function(req, res, next) {
     });
     user.sessions = newSessions;
     user.save(function(err, result) {
-      // FIXME
-      var domain = (process.env.NODE_ENV == "production") ? ".example.org" : "localhost";
+      var domain = new URL(config.get('endpoint')).hostname;
       res.clearCookie('sdsession', { domain: domain });
       res.sendStatus(204);
     });


@@ -59,7 +59,9 @@ router.get('/', (req, res) => {
"nickname": 1, "nickname": 1,
"email": 1 "email": 1
}).exec((err, user) => { }).exec((err, user) => {
a['user'] = user.toObject(); if (user) {
a['user'] = user.toObject();
}
cb(err, a); cb(err, a);
}); });
} else { } else {


@@ -5,6 +5,7 @@ require('../../models/schema');
 var mailer = require('../../helpers/mailer');
 var uploader = require('../../helpers/uploader');
+var importer = require('../../helpers/importer');
 var bcrypt = require('bcryptjs');
 var crypo = require('crypto');
@@ -15,6 +16,7 @@ var fs = require('fs');
 var request = require('request');
 var gm = require('gm');
 var validator = require('validator');
+var URL = require('url').URL;
 var express = require('express');
 var router = express.Router();
@@ -181,8 +183,7 @@ router.get('/loginorsignupviagoogle', function(req, res) {
   var apiUrl = "https://www.googleapis.com/oauth2/v1/userinfo?alt=json&access_token=" + tokens.access_token;
   var finalizeLogin = function(session){
-    var secure = process.env.NODE_ENV == "production" || process.env.NODE_ENV == "staging";
-    res.cookie('sdsession', session.token, { httpOnly: true, secure: secure});
+    res.cookie('sdsession', session.token, { httpOnly: true });
     res.status(201).json(session);
   };
@@ -467,4 +468,13 @@ router.post('/:user_id/confirm', function(req, res, next) {
   res.sendStatus(201);
 });
+
+router.get('/:user_id/import', function(req, res, next) {
+  if (req.query.zip) {
+    res.send("importing");
+    importer.importZIP(req.user, req.query.zip);
+  } else {
+    res.sendStatus(400);
+  }
+});
 module.exports = router;


@@ -53,7 +53,7 @@
<p> <p>
<div class="col-xs-6"> <div class="col-xs-6">
<a href="/contact">[[ __("contact") ]]</a> <a href="/contact">[[ __("contact") ]]</a>
<span style="color:#888">&copy; 20112017 The Spacedeck Open Developers</span> <span style="color:#888">&copy; 20112018 The Spacedeck Open Developers <a href="https://github.com/spacedeck/spacedeck-open">https://github.com/spacedeck/spacedeck-open</a></span>
</div> </div>
</p> </p>
</div> </div>


@@ -23,10 +23,18 @@
 {% if process.env.NODE_ENV != "production" %}
 var ENV = {
   name: 'development',
-  webHost: "localhost:9000",
-  webEndpoint:"http://localhost:9000",
-  apiEndpoint: "http://localhost:9000",
-  websocketsEndpoint: "ws://localhost:9000"
+  webHost: "localhost:9666",
+  webEndpoint:"http://localhost:9666",
+  apiEndpoint: "http://localhost:9666",
+  websocketsEndpoint: "ws://localhost:9666"
+};
+{% else %}
+var ENV = {
+  name: 'production',
+  webHost: location.host,
+  webEndpoint: location.origin,
+  apiEndpoint: location.origin,
+  websocketsEndpoint: location.origin.replace("https:","wss:").replace("http:","ws:")
 };
 {% endif %}