Connect from an application in Docker to a Neo4j database in Docker

Problem Description:

I’m working on a project using LangChain to connect to a Neo4j database hosted in a Docker container. I’ve set up the environment using docker-compose, but I’m encountering a Connection Refused error when attempting to interact with Neo4j through the Neo4jVector integration from LangChain.

Here’s what I’ve done so far:

My Python code worked properly when I used the Neo4j API against a cloud instance, but since I moved Neo4j into Docker I have been getting this error.

This works: I tested direct connectivity with GraphDatabase.driver and confirmed that Neo4j is reachable:

from neo4j import GraphDatabase

uri = "bolt://neo4j_database:7687"
username = "neo4j"
password = "neo4j_admin"

driver = GraphDatabase.driver(uri, auth=(username, password))

def push_data(tx):
    # Create historical figures as nodes
    tx.run("""
        CREATE (mahendra:Person {name: 'King Mahendra', role: 'King of Nepal', reign: '1955-1972'})
        CREATE (rana_regime:Event {name: 'Rana Regime', description: 'Autocratic rule by the Rana family in Nepal', period: '1846-1951'})
        CREATE (tribhuvan:Person {name: 'King Tribhuvan', role: 'King of Nepal', reign: '1911-1955'})
        CREATE (prithvi:Person {name: 'King Prithvi Narayan Shah', role: 'Founder of Unified Nepal', reign: '1743-1775'})
        CREATE (bp_koirala:Person {name: 'BP Koirala', role: 'First Democratically Elected Prime Minister of Nepal', term: '1959-1960'})
        CREATE (military_coup:Event {name: '1960 Military Coup', description: 'King Mahendra’s coup that dissolved the democratic government', year: 1960})
    """)

    # Create relationships
    tx.run("""
        MATCH (mahendra:Person {name: 'King Mahendra'}),
              (tribhuvan:Person {name: 'King Tribhuvan'}),
              (rana_regime:Event {name: 'Rana Regime'}),
              (prithvi:Person {name: 'King Prithvi Narayan Shah'}),
              (bp_koirala:Person {name: 'BP Koirala'}),
              (military_coup:Event {name: '1960 Military Coup'})
        CREATE (tribhuvan)-[:ENDED]->(rana_regime)
        CREATE (mahendra)-[:CONDUCTED]->(military_coup)
        CREATE (bp_koirala)-[:OVERTHROWN_BY]->(military_coup)
        CREATE (prithvi)-[:FOUNDED]->(:Entity {name: 'Unified Nepal'})
        CREATE (mahendra)-[:SUCCEEDED]->(tribhuvan)
    """)

try:
    with driver.session() as session:
        session.execute_write(push_data)
        print("Data pushed successfully!")
        print(uri)
except Exception as e:
    print(f"An error occurred: {e}")
finally:
    driver.close()

Error:

Embedding model initialized successfully.
Error creating vector index: [Errno 111] Connection refused
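
The error above only shows the exception message, so I can't tell from it which connection is actually being refused. Below is a small helper sketch (the helper name and the stand-in exception are mine, just for illustration) that I plan to drop into the except blocks of my script so the full stack trace, including the refused address and the failing library call, gets printed:

import traceback

def log_full_error(label: str, exc: Exception) -> None:
    # Hypothetical helper for this sketch: print the label plus the full stack
    # trace instead of only str(exc), so the failing library call and the
    # refused host:port show up in the output.
    print(label)
    traceback.print_exception(type(exc), exc, exc.__traceback__)

# Stand-in demonstration; in my script this would wrap the Neo4jVector calls.
try:
    raise ConnectionRefusedError(111, "Connection refused")
except Exception as e:
    log_full_error("Error creating vector index:", e)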

These are the package versions I'm using:

langchain == 0.3.13
neo4j == 5.27.0
langchain_ollama == 0.2.2

Environment Variables:
NEO4J_URI=bolt://172.20.0.3:7687 
NEO4J_USERNAME=neo4j 
NEO4J_PASSWORD=neo4j_admin

The Neo4j Browser is accessible at http://localhost:7474, and I can connect to the database at bolt://172.20.0.3:7687.
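
As a sanity check, this is a small sketch I can run in the same environment to confirm which variable name actually reaches the process, since my .env defines NEO4J_URI while docker-compose and my script use NEO4J_URL (as far as I know, load_dotenv with its defaults does not override variables that docker-compose has already set):

import os
from dotenv import load_dotenv

# With its defaults, load_dotenv() should leave variables already set by
# docker-compose untouched and only fill in missing ones from .env.
load_dotenv()

for name in ("NEO4J_URI", "NEO4J_URL", "NEO4J_USERNAME", "NEO4J_PASSWORD"):
    print(name, "=", os.getenv(name))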

But when I try to use it through LangChain and Ollama, I get the error above.

This is my LangChain code, which isn't working:

from langchain_ollama import OllamaEmbeddings
from langchain_community.vectorstores.neo4j_vector import Neo4jVector
import os
from dotenv import load_dotenv
from langchain.schema import Document

load_dotenv()

URI = os.getenv("NEO4J_URL")
username = os.getenv("NEO4J_USERNAME")
password = os.getenv("NEO4J_PASSWORD")
chunked_folder_path = os.getenv("CHUNK_FOLDER_PATH")
index = "vector"
keyword_index_name = "keyword"

print("neo4j url:" ,URI)
print("neo4j username:" ,username)
print("neo4j password:" ,password)

def read_documents(chunked_folder_path):
    docs = []
    try:
        for filename in os.listdir(chunked_folder_path):
            if filename.endswith(".txt"):
                file_path = os.path.join(chunked_folder_path, filename)
                with open(file_path, 'r', encoding='utf-8') as file:
                    content = file.read()
                    docs.append(Document(page_content=content, metadata={"filename": filename}))
            else:
                print(f"Skipped non-txt file: {filename}")
    except Exception as e:
        print(f"Error reading documents from folder: {e}")
    return docs


def retrieval_from_graph(documents):
    try:
        embedding_model = OllamaEmbeddings(model="mxbai-embed-large")
        print("Embedding model initialized successfully.")
    except Exception as e:
        print(f"Error initializing the embedding model: {e}")
        return None
    
    try:
        vectorstore = Neo4jVector.from_existing_index(
            embedding=embedding_model,
            url=URI,
            username=username,
            password=password,
            search_type="hybrid",
            index_name=index,
            keyword_index_name=keyword_index_name,
            node_label=["Events", "Person"],
            embedding_node_property="embedding",
        )
        print("Successfully connected to the existing Neo4j vector index.")
        return vectorstore
    except Exception as e:
        print(f"Existing index not found, Creating a new one ......: {e}")

    documents = read_documents(chunked_folder_path)
    if not documents:
        print("No documents were loaded. Cannot create index")
        return None
    try:
        vectorstore = Neo4jVector.from_documents(
            embedding=embedding_model,
            documents=documents,
            url=URI,
            username=username,
            password=password,
            search_type="hybrid",
            index_name=index,
            keyword_index_name=keyword_index_name,
            node_label=["Events", "Person"],
            embedding_node_property="embedding",
        )
        print("New vector index created successfully")
        return vectorstore
    except Exception as creation_error:
        print(f"Error creating vector index: {creation_error}")



def similarity_search(vectorstore, query):
    try:
        docs_with_score = vectorstore.similarity_search_with_score(query, k=2)
        for doc, score in docs_with_score:
            print(f"Document: {doc.page_content}\nScore: {score}")
    except Exception as e:
        print(f"Error during similarity search: {e}")

def query_similarity_search(query ):
    documents = read_documents(chunked_folder_path)
    if not documents:
        print("No document found in the folder")
        return
    vectorstore = retrieval_from_graph(documents)

    if not vectorstore:
        print("Failed to create vector store")
        return 
    result = similarity_search(vectorstore, query)
    return result

if __name__ == "__main__":
    query = "Who is King Birendra"
    query_similarity_search(query)
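
To isolate the connection itself from the embedding and document logic, this is a minimal sketch I can run inside the web container (for example with docker compose exec web python test_bolt.py; the file name is just an example) that uses only the neo4j driver with the exact values my LangChain script reads:

import os
from dotenv import load_dotenv
from neo4j import GraphDatabase

load_dotenv()

uri = os.getenv("NEO4J_URL")            # same variable the LangChain script reads
username = os.getenv("NEO4J_USERNAME")
password = os.getenv("NEO4J_PASSWORD")

print("connecting to:", uri)
with GraphDatabase.driver(uri, auth=(username, password)) as driver:
    driver.verify_connectivity()        # raises if the bolt endpoint is unreachable
    print("bolt connection OK")

If this succeeds while Neo4jVector still fails, the bolt connection itself is fine and the refusal must be coming from somewhere else in the chain.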

This is my docker-compose.yaml:

services:
  web:
    build: 
      context: .
      dockerfile: Dockerfile
    environment:
      DATABASE_URL: "postgresql://postgres_user:postgres_password@db:5432/postgres_db"
      OLLAMA_URL: "http://ollama:11434"  
      QDRANT_URL: "http://qdrant_database:6333"
      NEO4J_URL: "bolt://neo4j_database:7687"
    ports:
      - "8000:8000"
    depends_on:
      - db
      - ollama
      - qdrant_database
      - neo4j
    volumes:
      - .:/app
    networks:
      - my_network
    command: ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
    restart: always
  
  db:
    image: postgres:13
    environment:
      POSTGRES_USER: postgres_user
      POSTGRES_PASSWORD: postgres_password
      POSTGRES_DB: postgres_db
    volumes:
      - pgdata:/var/lib/postgresql/data
    ports:
      - "5432:5432"
    networks:
      - my_network

    
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    restart: unless-stopped
    ports:
      - "11434:11434"
    volumes:
      - ~/.ollama/models:/root/.ollama/models
    deploy:
      resources:
        reservations:
          devices:
          - driver: nvidia
            capabilities: [gpu]
            count: all
    networks:
      - my_network


  qdrant_database:
    image: qdrant/qdrant:latest
    container_name: qdrant_database
    restart: unless-stopped
    ports:
      - "6333:6333"
    environment:
      QDRANT_CONFIG_PATH: /qdrant/config.yml
    volumes:
      - ./qdrant/config.yml:/qdrant/config.yml
      - ./qdrant_data:/qdrant/storage/
      - ./retrievers:/app/retrieval/
    networks:
      - my_network

  neo4j:
    image: neo4j:latest
    container_name: neo4j_database
    restart: unless-stopped
    ports:
      - "7474:7474"
      - "7687:7687"
    environment:
      - NEO4J_AUTH=neo4j/neo4j_admin
      - NEO4J_dbms_connector_bolt_listen__address=0.0.0.0:7687
      - NEO4J_dbms_connector_bolt_advertised__address=:7687
      - NEO4J_dbms_connector_http_advertised__address=:7474
      - NEO4J_dbms_default__listen__address=0.0.0.0
    volumes:
      - ./neo4j/data:/data
      - ./neo4j/logs:/logs
      - ./neo4j/import:/neo4j/import
      - ./neo4j/plugins:/plugins
    networks:
      - my_network

volumes:
  pgdata:
    driver: local
  neo4j_data:
    driver: local
  neo4j_import:
    driver: local
  neo4j_plugins:
    driver: local
  neo4j_logs:
    driver: local
networks:
  my_network:
    driver: bridge
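
For completeness, this is a quick reachability sketch I can run inside the web container; the hostnames and ports are taken from the compose file above, and it only checks that the service names resolve and the ports accept TCP connections (no authentication):

import socket

targets = [
    ("neo4j_database", 7687),   # bolt
    ("neo4j_database", 7474),   # Neo4j Browser / HTTP
    ("ollama", 11434),
    ("qdrant_database", 6333),
    ("db", 5432),
]

for host, port in targets:
    try:
        with socket.create_connection((host, port), timeout=5):
            print(f"{host}:{port} reachable")
    except OSError as e:
        print(f"{host}:{port} NOT reachable: {e}")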

Why am I getting a Connection Refused error when using LangChain's Neo4jVector? How can I resolve this issue while maintaining the current Docker setup?