简体   繁体   中英

Creating a connection to an existing MongoDB database from an existing Docker container

I want to connect to an existing MongoDB Atlas database from an application running in an existing Docker container. Can anybody help me out?

I tried several syntaxes and approaches, but it always gives me an error. Thanks in advance!

player-cloud_1  | error  { MongoError: failed to connect to server [cluster0.bgso9.mongodb.net:27017] on first connect [MongoError: getaddrinfo ENOTFOUND cluster0.bgso9.mongodb.net cluster0.bgso9.mongodb.net:27017]
player-cloud_1  |     at Pool.<anonymous> (/app/node_modules/mongodb-core/lib/topologies/server.js:336:35)
player-cloud_1  |     at Pool.emit (events.js:182:13)
player-cloud_1  |     at Pool.EventEmitter.emit (domain.js:442:20)
player-cloud_1  |     at Connection.<anonymous> (/app/node_modules/mongodb-core/lib/connection/pool.js:280:12)
player-cloud_1  |     at Object.onceWrapper (events.js:273:13)
player-cloud_1  |     at Connection.emit (events.js:182:13)
player-cloud_1  |     at Connection.EventEmitter.emit (domain.js:442:20)
player-cloud_1  |     at Socket.<anonymous> (/app/node_modules/mongodb-core/lib/connection/connection.js:189:49)
player-cloud_1  |     at Object.onceWrapper (events.js:273:13)
player-cloud_1  |     at Socket.emit (events.js:182:13)
player-cloud_1  |     at Socket.EventEmitter.emit (domain.js:442:20)
player-cloud_1  |     at emitErrorNT (internal/streams/destroy.js:82:8)
player-cloud_1  |     at emitErrorAndCloseNT (internal/streams/destroy.js:50:3)
player-cloud_1  |     at process._tickCallback (internal/process/next_tick.js:63:19)
player-cloud_1  |   name: 'MongoError',
player-cloud_1  |   message:
player-cloud_1  |    'failed to connect to server [cluster0.bgso9.mongodb.net:27017] on first connect [MongoError: getaddrinfo ENOTFOUND cluster0.bgso9.mongodb.net cluster0.bgso9.mongodb.net:27017]' }
player-cloud_1  | error connecting to the database

Code Snippet:


    const express = require('express');
    const path = require('path');
    const logger = require('morgan');
    const cookieParser = require('cookie-parser');
    const bodyParser = require('body-parser');
    const passport = require('passport');
    const adaro = require('adaro');
    const session = require('express-session');
    const MemoryStore = require('memorystore')(session);
    const flash = require('connect-flash');
    const Raven = require('raven');
    const cors = require('cors');
    const mongoose = require('mongoose');

    // const { connectDB } = require('./db/mongo');

    const { DSN } = require('./config/raven');

    const app = express();

    // If this is production, initialize Sentry.
    // `isProd` is also consulted further down (request logging, error handler).
    const isProd = process.env.NODE_ENV === 'production';
    if (isProd) {
        // Raven's request handler must be registered before any routes so
        // Sentry can attach request context to errors raised downstream.
        Raven.config(DSN).install();
        app.use(Raven.requestHandler());
    }

    // MongoDB (Atlas) connection.
    mongoose.Promise = global.Promise;

    // Read the connection string from the environment so credentials are not
    // committed to source control; fall back to the original hard-coded URI
    // for backward compatibility.
    const mongoURI =
      process.env.MONGO_URI ||
      "mongodb+srv://dbUser:*****@cluster0.bgso9.mongodb.net/Playercloud?retryWrites=true&w=majority";

    // FIX: `useMongoClient` is a Mongoose 4.x option and is ignored (or
    // rejected) by Mongoose 5.x. `mongodb+srv://` URIs require the new URL
    // parser and the unified topology (Mongoose >= 5.7) so the SRV/TXT DNS
    // records are resolved correctly — the old mongodb-core topology in the
    // stack trace treats the SRV hostname as a plain host and fails with
    // ENOTFOUND. Also verify the container has outbound DNS access to Atlas.
    mongoose.connect(mongoURI, {
        useNewUrlParser: true,
        useUnifiedTopology: true
    }).then(() => {
        console.log('successfully connected to the database');
    }).catch(err => {
        console.log('error ', err);
        console.log('error connecting to the database');
        // process.exit(); // left disabled so the app can still start without Mongo
    });


    // View engine setup: render `.dust` templates from ./views via adaro.
    app.engine('dust', adaro.dust());
    app.set('views', path.join(__dirname, 'views'));
    app.set('view engine', 'dust');

    require('./config/passport')(passport); // pass passport for configuration


    // Middleware registration — ORDER MATTERS: body/cookie parsing must come
    // before session/passport, and session() before passport.session().

    // uncomment after placing your favicon in /public
    // app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
    if (!isProd) {
        app.use(logger('dev'));
    }
    app.use(bodyParser.json({
        limit: '25mb'
    }));
    app.use(bodyParser.urlencoded({ extended: false }));
    app.use(cookieParser());
    app.use(express.static(path.join(__dirname, 'public')));
    app.use(cors({ origin: ['http://localhost:3000', null, 'null'], credentials: true }));

    // required for passport
    app.use(session({
        // memorystore prunes expired entries (unlike express-session's default
        // MemoryStore, which leaks and is unsuitable for production).
        store: new MemoryStore({
            checkPeriod: 86400000 // prune expired sessions every 24h
        }),
        // SECURITY FIX: the session secret was hard-coded; read it from the
        // environment, keeping the old literal as a fallback so existing
        // deployments keep working unchanged.
        secret: process.env.SESSION_SECRET || 'basanti$aaj@izat*ka!sawal^hai',
        resave: true,
        saveUninitialized: true
    }));
    app.use(passport.initialize());
    app.use(passport.session()); // persistent login sessions
    app.use(flash());
    // Expose flash messages to every subsequent view render.
    app.use((req, res, next) => {
        res.locals.flash = req.flash();
        next();
    });

    // routes ====================================================================== >>>>>>>>>>>
    require('./app/routes.js')(app, passport); // load our routes and pass in our app and fully configured passport

    // Error handlers.
    // Raven's error handler must come after the routes but before any other
    // error middleware, so Sentry records the error before it is handled.
    if (isProd) {
        app.use(Raven.errorHandler());
    }

    // Final catch-all. Express identifies error middleware by its 4-argument
    // signature, so the unused `_` parameter must stay.
    app.use((err, req, res, _) => { // eslint-disable-line
        console.log(err);
        res.sendStatus(500);
    });

    module.exports = app;

docker-compose file (docker-compose.yml):

version: "3"
services:
  player-cloud:
    build: .
    # image: warisanwar/player-cloud-v1:latest
    # deploy:
    #   replicas: 1
    #   placement:
    #     constraints: [node.role == manager]
    ports:
      - "3001:3001"
    volumes:
      - ./player-cloud/avatars:/app/public/avatars
    depends_on:
      - "rethinkdb"
      - "neo4j"
      - "redis"
    networks:
      - webnet
    # NOTE: the app connects to MongoDB Atlas over the internet; this container
    # needs working outbound DNS (the ENOTFOUND error means the Atlas SRV
    # hostname could not be resolved from inside the container).
    command: ["./wait-for-it.sh", "neo4j:7687", "--", "npm", "start"]
  rethinkdb:
    image: rethinkdb:2.4.0
    ports:
      - "8080:8080"
      - "28015:28015"
      - "29015:29015"
    volumes:
      - ./player-cloud/data:/data
    # deploy:
    #   replicas: 1
    #   placement:
    #     constraints: [node.role == manager]
    networks:
      - webnet
  neo4j:
    image: neo4j:3.5.17
    ports:
      - "7474:7474"
      - "7687:7687"
    volumes:
      - ./player-cloud/neo:/data
      - ./player-cloud/neo-logs:/logs
    # deploy:
    #   resources:
    #     limits:
    #       memory: 512M
    #     reservations:
    #       memory: 512M
    environment:
      - NEO4J_dbms_memory_pagecache_size=50M
      # FIX: the Neo4j image maps env vars to settings by replacing "_" in the
      # setting name with "__": dbms.memory.heap.max_size =>
      # NEO4J_dbms_memory_heap_max__size. The previous "maxSize" spelling did
      # not map to any setting and was silently ignored.
      - NEO4J_dbms_memory_heap_max__size=180M
    # deploy:
    #   replicas: 1
    #   placement:
    #     constraints: [node.role == manager]
    networks:
      - webnet
  redis:
    image: redis:latest
    # deploy:
    #   replicas: 1
    #   placement:
    #     constraints: [node.role == manager]
    #   resources:
    #     limits:
    #       memory: 256M
    #     reservations:
    #       memory: 128M
    networks:
      - webnet
  # nginx:
  #   image:
  #     nginx:latest
  #   ports:
  #     - "80:80"
  #     - "443:443"
    # deploy:
    #   replicas: 1
    #   placement:
    #     constraints: [node.role == manager]
    # depends_on:
    #   - "player-cloud"
    # volumes:
    #   - ./nginx/nginx.conf:/etc/nginx/conf.d/nginx.conf
    #   - ./var/www/playercloud:/var/www/playercloud
    #   - ./etc/letsencrypt/live/playercloud.in/fullchain.pem:/etc/letsencrypt/live/playercloud.in/fullchain.pem
    #   - ./etc/letsencrypt/live/playercloud.in/privkey.pem:/etc/letsencrypt/live/playercloud.in/privkey.pem
    # networks:
    #   - webnet
networks:
  webnet:

Never mind — it was resolved in the end!

The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM