How can I start testing database connections in CircleCI? I know the approach is to add a secondary Docker image to the job and put the database settings there, but I am not sure how to actually do that. Are there any examples I can follow? The documentation covers the basics, but I couldn't get much help from it.
I have a Python test file that requires a DB connection -
import pytest

import dp  # module that provides DataPlatformConnection


class TestDataPlatformConnection(object):
    # Instantiated at class-definition time, i.e. as soon as pytest collects this file.
    dp = dp.DataPlatformConnection()
    def test_insert_from_dicts(self):
        with pytest.raises(Exception):
            self.dp.insert_from_dicts('', '', ['test1', 'test2'])
    def test_create_table_sql(self):
        assert self.dp.create_table_sql('prod', 'school', ['school_name', 'city']) == \
            'CREATE TABLE "prod"."school" ("school_name" text , "city" text);'
        assert self.dp.create_table_sql('dev', 'geocoding', ['lat', 'long']) == \
            'CREATE TABLE "dev"."geocoding" ("lat" text , "long" text);'
        with pytest.raises(Exception):
            _ = self.dp.create_table_sql('sandbox', 'school', None)
        with pytest.raises(Exception):
            _ = self.dp.create_table_sql('sandbox', 'school', [])
        with pytest.raises(Exception):
            _ = self.dp.create_table_sql('sandbox', 'school', 'bad columns value')
        with pytest.raises(Exception):
            _ = self.dp.create_table_sql('', '', ['lat', 'long'])
        with pytest.raises(Exception):
            _ = self.dp.create_table_sql(None, None, ['lat', 'long'])
    def test_drop_temp_table_sql(self):
        assert self.dp.drop_temp_table_sql('transit') == \
            'DROP TABLE IF EXISTS transient."transit"'
        with pytest.raises(Exception):
            _ = self.dp.drop_temp_table_sql('')
        with pytest.raises(Exception):
            _ = self.dp.drop_temp_table_sql(None)
        with pytest.raises(TypeError):
            self.dp.drop_temp_table_sql(12345)
    def test_duplicate_table_sql(self):
        assert self.dp.duplicate_table_sql('sandbox', 'school', 'prod', 'school') == \
            'CREATE TABLE "sandbox"."school" AS SELECT * FROM "prod"."school" LIMIT 0'
    def test_copy_csv_sql(self):
        assert self.dp.copy_csv_sql('sandbox', 'school') == \
            'COPY "sandbox"."school" FROM STDIN WITH CSV HEADER DELIMITER AS \',\''
        assert self.dp.copy_csv_sql('sandbox', 'school', header=False) == \
            'COPY "sandbox"."school" FROM STDIN WITH CSV DELIMITER AS \',\''
        assert self.dp.copy_csv_sql('sandbox', 'school') == \
            'COPY "sandbox"."school" FROM STDIN WITH CSV HEADER DELIMITER AS \',\''
    def test_csv_export_sql(self):
        assert self.dp.csv_export_sql('SELECT * FROM prod.geo') == \
            'COPY (SELECT * FROM prod.geo) TO STDOUT WITH CSV HEADER'
        assert self.dp.csv_export_sql('') == \
            'COPY () TO STDOUT WITH CSV HEADER'
    def test_table_sample_sql(self):
        assert self.dp.table_sample_sql('prod', 'school', 100) == \
            'SELECT * FROM "prod"."school" LIMIT 100'
        with pytest.raises(Exception):
            _ = self.dp.table_sample_sql('prod', 'school', '100')
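For context, `DataPlatformConnection` picks up the `DP_*` environment variables from the config below and opens a connection when it is constructed. A simplified sketch is shown here (not the exact class; I'm assuming a psycopg2-style driver for illustration):

```python
import os

import psycopg2  # illustrative; the real class may use a different driver


class DataPlatformConnection(object):
    """Simplified sketch: reads the DP_* env vars set in config.yml and
    connects to Postgres when instantiated."""

    def __init__(self):
        self.conn = psycopg2.connect(
            dbname=os.environ['DP_DBNAME'],
            user=os.environ['DP_USER'],
            password=os.environ['DP_PASSWORD'],
            host=os.environ['DP_HOST'],
            port=os.environ['DP_PORT'],
        )

    # ...plus the SQL-building helpers exercised in the tests above,
    # e.g. create_table_sql, drop_temp_table_sql, copy_csv_sql, etc.
```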
What settings do I need to put in my DB Docker image for these tests to work?
My config.yml file looks like this -
version: 2
jobs:
  build:
    docker:
      - image: circleci/python:2.7.14-jessie
        environment:
          DP_DBNAME: ""
          DP_USER: ""
          DP_PASSWORD: ""
          DP_HOST: ""
          DP_PORT: ""
          DP_LOG_LEVEL: ""
          DP_PHOTO_S3_BUCKET: ""
          # prod CREA credentials
          DP_CREA_DDF_USERNAME: ""
          DP_CREA_DDF_PASSWORD: ""
          DP_CREA_DDF_LOGIN_URL: ""
          # TREB DLA credentials
          DP_TREB_DLA_USERNAME: ""
          DP_TREB_DLA_PASSWORD: ""
          DP_TREB_DLA_PHOTOS_FTP_SERVER_URL: ""
          DP_TREB_DLA_FORM_POST_URL: ""
          # TREB VOW credentials
          DP_TREB_VOW_USERNAME: ""
          DP_TREB_VOW_USERNAME_ACTIVES: ""
          DP_TREB_VOW_PASSWORD: ""
          DP_TREB_VOW_LOGIN_URL: ""
          DP_TREB_RETS_LOGIN_URL: ""
          # BRC credentials
          DP_BRC_USERNAME: ""
          DP_BRC_PASSWORD: ""
          # BRC IDX credentials
          DP_BRC_IDX_LOGIN_URL: ""
          DP_BRC_IDX_REW_USERNAME: ""
          DP_BRC_IDX_REW_PASSWORD: ""
          DP_BRC_IDX_BROKER_USERNAME: ""
          DP_BRC_IDX_BROKER_PASSWORD: ""
          # remaining credentials
          DP_VIREB_IDX_LOGIN_URL: ""
          DP_VREB_IDX_LOGIN_URL: ""
          DP_WLS_IDX_LOGIN_URL: ""
          DP_TREB_IDX_USERNAME: ""
          DP_VIREB_IDX_USERNAME: ""
          DP_VREB_IDX_USERNAME: ""
          DP_WLS_IDX_USERNAME: ""
          DP_TREB_IDX_USERNAME_ACTIVES: ""
          DP_VIREB_IDX_PASSWORD: ""
          DP_VREB_IDX_PASSWORD: ""
          DP_WLS_IDX_PASSWORD: ""
          DP_TREB_IDX_PASSWORD: ""
          # Full path to Google API keyfile with data write access to Google BigQuery event tracking project
          GOOGLE_APPLICATION_CREDENTIALS: ""
          # Twilio
          TWILIO_ACCOUNT_SID: ""
          TWILIO_AUTH_TOKEN: ""
          TWILIO_SOURCE_PHONE_NUMBER: ""
          TWILIO_MY_PHONE_NUMBER: ""
          # AWS
          AWS_ACCESS_KEY: ""
          AWS_SECRET_KEY: ""
          DP_LISTING_PHOTOS_S3_BUCKET: ""
      - image: circleci/postgres:9.6.5-alpine-ram
        environment:
          POSTGRES_USER: root
          POSTGRES_DB: circle_test
          POSTGRES_PASSWORD: ""
    steps:
      - checkout
      - run:
          name: Install Python dependencies in a venv
          command: |
            virtualenv venv
            . venv/bin/activate
            cd database_jobs
            python -m pip install -r requirements.txt
      - run:
          name: Running tests
          command: |
            . venv/bin/activate
            cd database_jobs
            python -m pytest -s -vv tests/ --ignore=tests/test_data_platform.py --ignore=tests/test_sql_helper.py
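From the CircleCI docs, my understanding is that the secondary container is reachable on localhost, so I assume the `DP_*` database values should match the `POSTGRES_*` settings on the `circleci/postgres` image above (host `localhost`, port `5432`, database `circle_test`, user `root`, empty password). A quick connectivity check along these lines is what I'd expect to pass in the build container, but I'm not sure these are the right settings (psycopg2 here is just for illustration):

```python
import os

import psycopg2  # illustrative driver choice

# Values I expect to need for the Postgres sidecar, mirroring the
# POSTGRES_* environment of the circleci/postgres image.
conn = psycopg2.connect(
    dbname=os.environ.get('DP_DBNAME', 'circle_test'),
    user=os.environ.get('DP_USER', 'root'),
    password=os.environ.get('DP_PASSWORD', ''),
    host=os.environ.get('DP_HOST', 'localhost'),
    port=os.environ.get('DP_PORT', '5432'),
)
cur = conn.cursor()
cur.execute('SELECT 1')
print(cur.fetchone())  # should print (1,) if the sidecar is reachable
conn.close()
```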
