I am trying to upload a file using "Angular File Upload". When I try to connect and send the file to MongoDB, Firebug shows this error:
net::ERR_INSECURE_RESPONSE
In mongodb.log log files it shows the connection is done:
2014-11-09T11:57:05.512+0400 I NETWORK [initandlisten] connection accepted from xxx.x.x.x:53749 #2 (1 connection now open)
2014-11-09T11:57:05.524+0400 I NETWORK [conn2] end connection xxx.x.x.x:53749 (0 connections now open)
I have also created and used an SSL certificate.
My Angular Js code is:
// Handler fired when the user selects one or more files; uploads each one.
// NOTE(review): this posts straight to MongoDB's wire-protocol port (27017),
// which does not speak HTTP(S) — an intermediate REST service is required.
$scope.onFileSelect = function($files) {
  // $files: array of selected File objects (each has name, size, and type).
  $files.forEach(function(selectedFile) {
    $upload.upload({
      url: "https://localhost:27017/mydb",
      file: selectedFile,
      progress: function(e){ console.log("Progress: ");}
    }).then(function(data, status, headers, config) {
      // resolved once the upload has completed
      console.log("File Uploaded : "+ data);
    });
  });
};
You'll need an interim step there. Going directly from Angular to Mongo will not work out. If you want a generic REST interface to Mongo with which you can utilize Angular's bundled $http services, take a look at the list of REST services on Mongo's site.
Mongo REST Services
http://www.mongodb.org/display/DOCS/Http+Interface#HttpInterface-RESTInterfaces
Angular $http Service:
http://docs.angularjs.org/api/ng.$http
There's a lot of different options here, but this is likely the easiest way to get up and going.
// Mongoose model for orders, bound to a dedicated connection.
var mongoose = require('mongoose');
// createConnection() gives a separate connection instead of the shared
// default one used by mongoose.connect().
// NOTE(review): port 3000 is unusual for mongod (default 27017) — confirm.
var db = mongoose.createConnection('mongodb://localhost:3000/database');
// Schema: route endpoints and departure time, all stored as plain strings.
var orderSchema = new mongoose.Schema({
routeFrom : String,
routeTo : String,
leaving: String
});
// Register the model on this specific connection and export it for callers.
var Order = db.model('Order', orderSchema);
module.exports = Order;
Related
Connects without a hitch, but on insert() throws me this error.
var MongoClient = require('mongodb').MongoClient;
const assert = require('assert');

// Connection string: credentials are separated from the host by '@'
// (the original '#' makes the URI invalid — '#' starts a fragment).
var url = 'mongodb://____:____@ds125565.mlab.com:25565/heroku_w268n9pj';

// Connect (driver v3+: callback receives a client, not a db), then insert.
MongoClient.connect(url, function(err, client) {
  assert.equal(null, err);
  // Declare with `var` — the original bare assignment created an implicit global.
  var db = client.db('temp');
  console.log("connected!");
  const collection = db.collection('temp');
  // insert() takes an array of documents; the value must be a defined
  // expression (a string literal here) — the bare identifier `please` was
  // undefined, and the call's brackets were never closed.
  collection.insert([{
    something: "please"
  }], function(insertErr, result) {
    assert.equal(null, insertErr);
    client.close();
  });
});
I saw some other answers regarding mLab accounts and credentials, but I just created a new admin account for this. Frustrating because it was working previously with v2.3.
When attempting to connect to an mlab database, you have to correctly specify the client. It's located at the end of your connection string, just after the final forward slash.
# mLab connection: the database name after the final '/' in the URI must match
# the database selected from the client, and mLab requires retryWrites=false.
# Credentials are separated from the host by '@' (the posted '#' is invalid
# in a MongoDB URI — presumably a paste typo; confirm against your dashboard).
mlab_url = "mongodb://db_user_name:db_password@ds139725.mlab.com:39725/heroku_sd1fp182?retryWrites=false"
# Bug fix: pass the URI we just built — the original passed an undefined `url`.
client = MongoClient(mlab_url)
db = client["heroku_sd1fp182"]
collection = db["coinHack"]
You may also get the error:
This MongoDB deployment does not support retryable writes. Please add retryWrites=false to your connection string.
Just add "?retryWrites=false" to your connection string, as shown above.
I have one Lotus DB (db1) on one server and second lotus db (db2) on another server.
How can I receive data from db1 to db2 via REST service (REST is configured in db1 to receive data from view in db1)?
I think the task must be clear, but I can't find a simple solution to this task :(
Use REST service's property databaseName with !! between server and database path:
<!-- REST service whose data source is a view living in ANOTHER database (db1). -->
<xe:restService id="restService2" pathInfo=...>
<xe:this.service>
<!-- databaseName uses the Notes "Server!!FilePath" syntax to address the
     remote database; viewName is the view inside that database. -->
<xe:viewItemFileService
databaseName="Server01!!Test/db1.nsf"
viewName="viewInDb1"
...>
</xe:viewItemFileService>
</xe:this.service>
</xe:restService>
You can call the URL:
// Server-side JavaScript (XPages/SSJS): build the URL of a REST service on
// this server from the incoming request, fetch it, and accumulate the JSON
// response body into a string.
var host = facesContext.getExternalContext().getRequest().getHeader("Host");
var path = facesContext.getExternalContext().getRequest().getContextPath();
var page = "/RESTServiceProvider.xsp/hotSheets?rName=AarionPerez";
var url = "http://" + host + path + page;
// NOTE(review): `url` is re-declared here, shadowing the string above with a
// java.net.URL built from it — legal in SSJS, but renaming would be clearer.
var url:java.net.URL = new java.net.URL(url);
var urlconn:java.net.URLConnection = url.openConnection();
// Wrap the connection's input stream in a buffered line reader.
var reader:java.io.BufferedReader = new java.io.BufferedReader(new java.io.InputStreamReader(urlconn.getInputStream()));
var inputLine;
var inJson = "";
// Read the response line by line until EOF, concatenating into inJson.
while ((inputLine = reader.readLine()) != null){
inJson += inputLine;
}
reader.close();
This is a call from an NSF, but you can put in a non-NSF URL and get a response. You may need to pass in authentication as well.
I'm building a Meteor (meteorjs) app that needs to store and display PDF files, sometimes as large as 500Mb. GridFS doesn't seem to be integrated yet so I'm wondering if it's worth using Meteor in this case or stick to Rails.
Ideally I would not use S3 - I'd like to keep the files on my server.
UPDATE: it seems it's possible to connect outside of Meteor directly, I don't need PDFs to be automatically moved - and it likely doesn't make sense.
More specifically I'm now looking at:
MongoDB -> ElasticSearch using https://github.com/richardwilly98/elasticsearch-river-mongodb
Using the instructions at https://github.com/richardwilly98/elasticsearch-river-mongodb/wiki
You can use GridFS inside meteor without touching any extra package
// Stream a GridFS file over HTTP using the GridStore bundled with Meteor.
var db = MongoInternals.defaultRemoteCollectionDriver().mongo.db; //grab the database object
var GridStore = MongoInternals.NpmModule.GridStore;
// Register a raw connect handler; any HTTP request to /someurl streams the file.
WebApp.connectHandlers.use('/someurl', function(req, res) {
var bigFile = new GridStore(db, 'bigfile.iso', 'r') //to read
// GridStore v1 requires an explicit open() before streaming.
bigFile.open(function(error, result) {
if (error) return
bigFile.stream(); //stream the file
bigFile.on('error', function(e) {...}) //handle error etc (placeholder body)
bigFile.on('end', function() {bigFile.close();}); //close the file when done
bigFile.pipe(res); //pipe the file to res
});
});
However, the current GridStore/mongo (v1.3.x) used by Meteor is a bit dated; the newest version is 2.x, from http://mongodb.github.io/node-mongodb-native/2.0/api-docs/
The v1.x doesn't seem to pipe well, so you may need to use the newer version.
The second option
// Same idea using the newer (2.x) mongodb driver's GridStore API.
var db = MongoInternals.defaultRemoteCollectionDriver().mongo.db; //grab the database object
var GridStore = Npm.require('mongodb').GridStore; //add Npm.depends({mongodb:'2.0.13'}) in your package.js
WebApp.connectHandlers.use('/someurl', function(req, res) {
var bigFile = new GridStore(db, 'bigfile.iso', 'r').stream(true); //the new API doesn't require bigFile.open() and will close automatically on end
bigFile.on('error', function(e) {...}); //handle error etc (placeholder body)
bigFile.on('end', function() {...}); //placeholder body
bigFile.pipe(res); //pipe the file to res
});
In this example, I use the WebApp.connectHandlers, but of course you can use iron: router or something. I tried with a file of 500 MB and it pipes all well. You also need to set the res.writeHead(200) and other stuff such as content-type, etc
I use chokidar to monitor if files have been changed in a folder. At the moment it is triggered when a user updates an Experiment Model in my ExperimentController.
// Watch the model-output folder and, on every change, push the new file
// contents into the Experiment model and notify clients over sockets.
// NOTE: the `...` lines below are the poster's elisions, not runnable code,
// and `req` is only in scope because this runs inside a controller action.
var chokidar = require('chokidar');
...
// ignored: skip dotfiles; persistent: keep the process alive while watching.
var watcher = chokidar.watch('assets/output-model-files', {ignored: /[\/\\]\./, persistent: true});
watcher.on('change', function(path) {
...read my changed file and update the content of my database, and send a socket publishUpdate message...
... read file content in an asynchronous way
fs.readFile(path,"utf-8", function (err, data) {
... update the experiment object with the content of the changed file
Experiment.update(req.param('id'), ExpObj, function expUpdated(err) {});
... send a message via socket saying that the exeriment object has been updated
Experiment.publishUpdate(req.param('id'), {name: exp.name,
results: JSON.stringify(myres),
action: ('file has just been updated. nb of trajectories: '+totalNbTrajectories)
});
But I would like to constantly monitor any change in the target folder and send Experiment.publishUpdate messages when it happens from the moment when the sails.js server starts, and not when a user update an experiment object.
Where could I place that chokidar.watch(...) code on the server side so as to update an experiment object from a file change? socket.js?
OK — I found that locating the code in bootstrap.js seems to do the job perfectly regarding the event triggering.
My bootstrap.js now looks like:
var chokidar = require('chokidar');
var fs = require('fs');
var os = require('os');
var sys = require('sys');

/**
 * Sails bootstrap hook: mark every user offline, then start watching the
 * model-output folder for file changes.
 *
 * It's very important to trigger the callback when you are finished with the
 * bootstrap — otherwise your server will never lift, since it's waiting on it.
 */
module.exports.bootstrap = function(cb) {
  User.update({}, {online: false}, function userUpdated(err, users) {
    if (err) {
      console.log(err);
    } else {
      var outputWatcher = chokidar.watch('assets/output-model-files', {
        ignored: /[\/\\]\./,
        persistent: true
      });
      outputWatcher.on('change', function(changedPath) {
        console.log('File', changedPath, 'has been changed');
        // file reading and (presumably) Experiment.publishUpdate go here
      });
    }
    cb();
  });
};
I have started building a REST api using expressJS. I am new to node so please bear with me. I want to be able to let users upload a file directly to Mongo's GridFS using a post to the /upload route.
From what I understand in expressJS documentation the req.files.image object is available in the route after uploading, which also includes a path and filename attribute. But how can I exactly read the image data and store it into GridFS?
I have looked into gridfs-stream but I can't tie ends together. Do I first need to read the file and then use that data for the writestream pipe? Or can I just use the file object from express and use those attributes to construct a writestream? Any pointers would be appreciated!
Here's a simple demo:
var express = require('express');
var fs = require('fs');
var mongo = require('mongodb');
var Grid = require('gridfs-stream');

// Open the `test` database on a local mongod, then wire up the Express app.
var db = new mongo.Db('test', new mongo.Server("127.0.0.1", 27017), { safe : false });

db.open(function (err) {
  if (err) {
    throw err;
  }

  var gfs = Grid(db, mongo);
  var app = express();
  app.use(express.bodyParser());

  // POST /upload: stream the temp file Express created into GridFS,
  // stored under the upload's original filename.
  app.post('/upload', function(req, res) {
    var uploaded = req.files.filename;
    var tempfile = uploaded.path;
    var origname = uploaded.name;
    var writestream = gfs.createWriteStream({ filename: origname });

    // open a stream to the temporary file created by Express...
    var readstream = fs.createReadStream(tempfile);
    readstream.on('end', function() {
      res.send('OK');
    });
    readstream.on('error', function() {
      res.send('ERR');
    });
    // ...and pipe it into GridFS
    readstream.pipe(writestream);
  });

  // GET /download?filename=...: stream the stored file back to the client.
  app.get('/download', function(req, res) {
    // TODO: set proper mime type + filename, handle errors, etc...
    gfs.createReadStream({ filename: req.param('filename') }).pipe(res);
  });

  app.listen(3012);
});
I use httpie to upload a file:
http --form post localhost:3012/upload filename#~/Desktop/test.png
You can check your database if the file is uploaded:
$ mongofiles list -d test
connected to: 127.0.0.1
test.png 5520
You can also download it again:
http --download get localhost:3012/download?filename=test.png