I have a Cypher script at /import/neo4j_importer_cypher_script_2024-12-19.cypher
(all the CSV files are in the same /import directory):
// Client-side parameter declaration for Neo4j Browser / cypher-shell.
// NOTE(review): `:param` is a client command, not Cypher. apoc.cypher.runFile
// sends statements straight to the server, so these lines are not evaluated
// there — likely why the script appears to do nothing under runFile. When
// running via APOC, supply the values as real query parameters or inline them.
:param {
// Define the file path root and the individual file names required for loading.
// https://neo4j.com/docs/operations-manual/current/configuration/file-locations/
file_path_root: 'file:///', // Change this to the folder your script can access the files at.
file_0: 'parsed_classes.csv',
file_1: 'parsed_interfaces.csv',
file_2: 'parsed_methods.csv',
file_3: 'parsed_constructors.csv',
file_4: 'parsed_class_methods.csv',
file_5: 'parsed_class_interfaces.csv',
file_6: 'parsed_interface_methods.csv',
file_7: 'parsed_method_objexprs.csv',
file_8: 'parsed_class_constructors.csv',
file_9: 'parsed_class_superclasses.csv',
file_10: 'parsed_method_methodcallexprs.csv'
};
// CONSTRAINT creation
// -------------------
//
// Create node uniqueness constraints, ensuring no duplicates for the given node label and ID property exist in the database. This also ensures no duplicates are introduced in future.
//
// NOTE: The following constraint creation syntax is generated based on the current connected database version 5.26-aura.
// One uniqueness constraint per node label; identifiers need no backtick
// quoting since they are plain alphanumerics.
CREATE CONSTRAINT qualified_name_Class_uniq IF NOT EXISTS
FOR (c:Class)
REQUIRE c.qualified_name IS UNIQUE;
CREATE CONSTRAINT qualified_signature_Method_uniq IF NOT EXISTS
FOR (m:Method)
REQUIRE m.qualified_signature IS UNIQUE;
CREATE CONSTRAINT qualified_name_Interface_uniq IF NOT EXISTS
FOR (i:Interface)
REQUIRE i.qualified_name IS UNIQUE;
CREATE CONSTRAINT qualified_signature_Constructor_uniq IF NOT EXISTS
FOR (k:Constructor)
REQUIRE k.qualified_signature IS UNIQUE;
// Optional list of node IDs to exclude from the node loads below.
// NOTE(review): like the `:param` block above, this is a Browser/cypher-shell
// client command — apoc.cypher.runFile will not evaluate it; the `$idsToSkip`
// references below would then be unbound parameters.
:param {
idsToSkip: []
};
// NODE load
// ---------
//
// Load nodes in batches, one node label at a time. Nodes will be created using a MERGE statement to ensure a node with the same label and ID property remains unique. Pre-existing nodes found by a MERGE statement will have their other properties set to the latest values encountered in a load file.
//
// NOTE: Any nodes with IDs in the 'idsToSkip' list parameter will not be loaded.
// Load Class nodes from parsed_classes.csv, batched in server-side
// transactions. MERGE on the unique key, then overwrite the remaining
// properties with the latest values from the file.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_0) AS row
WITH row
WHERE NOT row.qualified_name IN $idsToSkip AND NOT row.qualified_name IS NULL
CALL {
  WITH row
  MERGE (n:Class { qualified_name: row.qualified_name })
  // The merge key is already set by MERGE; re-assigning it was redundant.
  SET n.name       = row.name,
      n.modifiers  = row.modifiers,
      // LOAD CSV yields strings; trim + cast the numeric columns.
      n.line_begin = toInteger(trim(row.line_begin)),
      n.line_end   = toInteger(trim(row.line_end)),
      n.file       = row.file,
      n.definition = row.definition
} IN TRANSACTIONS OF 10000 ROWS;
// Load Method nodes from parsed_methods.csv, batched in server-side
// transactions. MERGE on the unique key, then refresh the other properties.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_2) AS row
WITH row
WHERE NOT row.qualified_signature IN $idsToSkip AND NOT row.qualified_signature IS NULL
CALL {
  WITH row
  MERGE (n:Method { qualified_signature: row.qualified_signature })
  // The merge key is already set by MERGE; re-assigning it was redundant.
  SET n.name       = row.name,
      n.modifiers  = row.modifiers,
      // Accept the common truthy spellings for the CSV boolean column.
      n.is_default = toLower(trim(row.is_default)) IN ['1','true','yes'],
      n.line_begin = toInteger(trim(row.line_begin)),
      n.line_end   = toInteger(trim(row.line_end)),
      n.file       = row.file,
      n.definition = row.definition
} IN TRANSACTIONS OF 10000 ROWS;
// Load Interface nodes from parsed_interfaces.csv, batched in server-side
// transactions. MERGE on the unique key, then refresh the other properties.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_1) AS row
WITH row
WHERE NOT row.qualified_name IN $idsToSkip AND NOT row.qualified_name IS NULL
CALL {
  WITH row
  MERGE (n:Interface { qualified_name: row.qualified_name })
  // The merge key is already set by MERGE; re-assigning it was redundant.
  SET n.name       = row.name,
      n.modifiers  = row.modifiers,
      n.line_begin = toInteger(trim(row.line_begin)),
      n.line_end   = toInteger(trim(row.line_end)),
      n.file       = row.file,
      n.definition = row.definition
} IN TRANSACTIONS OF 10000 ROWS;
// Load Constructor nodes from parsed_constructors.csv, batched in server-side
// transactions. MERGE on the unique key, then refresh the other properties.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_3) AS row
WITH row
WHERE NOT row.qualified_signature IN $idsToSkip AND NOT row.qualified_signature IS NULL
CALL {
  WITH row
  MERGE (n:Constructor { qualified_signature: row.qualified_signature })
  // The merge key is already set by MERGE; re-assigning it was redundant.
  SET n.name       = row.name,
      n.modifiers  = row.modifiers,
      n.line_begin = toInteger(trim(row.line_begin)),
      n.line_end   = toInteger(trim(row.line_end)),
      n.file       = row.file,
      n.definition = row.definition
} IN TRANSACTIONS OF 10000 ROWS;
// RELATIONSHIP load
// -----------------
//
// Load relationships in batches, one relationship type at a time. Relationships are created using a MERGE statement, meaning only one relationship of a given type will ever be created between a pair of nodes.
// (:Class)-[:hasMethod]->(:Method) edges from parsed_class_methods.csv.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_4) AS line
WITH line
CALL {
  WITH line
  MATCH (cls:Class { qualified_name: line.qualified_name })
  MATCH (mth:Method { qualified_signature: line.qualified_signature })
  // MERGE keeps the edge unique; the relationship variable was unused.
  MERGE (cls)-[:hasMethod]->(mth)
} IN TRANSACTIONS OF 10000 ROWS;
// (:Method)-[:callsMethod]->(:Method) edges from parsed_method_methodcallexprs.csv.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_10) AS line
WITH line
CALL {
  WITH line
  MATCH (caller:Method { qualified_signature: line.qualified_signature_method_decl })
  MATCH (callee:Method { qualified_signature: line.qualified_signature_method_call })
  // MERGE keeps the edge unique; the relationship variable was unused.
  MERGE (caller)-[:callsMethod]->(callee)
} IN TRANSACTIONS OF 10000 ROWS;
// (:Class)-[:implements]->(:Interface) edges from parsed_class_interfaces.csv.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_5) AS line
WITH line
CALL {
  WITH line
  MATCH (cls:Class { qualified_name: line.qualified_name })
  MATCH (ifc:Interface { qualified_name: line.interface_qualified_name })
  // MERGE keeps the edge unique; the relationship variable was unused.
  MERGE (cls)-[:implements]->(ifc)
} IN TRANSACTIONS OF 10000 ROWS;
// (:Interface)-[:hasInterfaceMethod]->(:Method) edges from parsed_interface_methods.csv.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_6) AS line
WITH line
CALL {
  WITH line
  MATCH (ifc:Interface { qualified_name: line.qualified_name })
  MATCH (mth:Method { qualified_signature: line.qualified_signature })
  // MERGE keeps the edge unique; the relationship variable was unused.
  MERGE (ifc)-[:hasInterfaceMethod]->(mth)
} IN TRANSACTIONS OF 10000 ROWS;
// (:Class)-[:hasConstructor]->(:Constructor) edges from parsed_class_constructors.csv.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_8) AS line
WITH line
CALL {
  WITH line
  MATCH (cls:Class { qualified_name: line.qualified_name })
  MATCH (ctor:Constructor { qualified_signature: line.qualified_signature })
  // MERGE keeps the edge unique; the relationship variable was unused.
  MERGE (cls)-[:hasConstructor]->(ctor)
} IN TRANSACTIONS OF 10000 ROWS;
// (:Method)-[:callsConstructor]->(:Constructor) edges from parsed_method_objexprs.csv.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_7) AS line
WITH line
CALL {
  WITH line
  MATCH (caller:Method { qualified_signature: line.qualified_signature_method })
  MATCH (ctor:Constructor { qualified_signature: line.qualified_signature_ctr })
  // MERGE keeps the edge unique; the relationship variable was unused.
  MERGE (caller)-[:callsConstructor]->(ctor)
} IN TRANSACTIONS OF 10000 ROWS;
// (:Class)-[:extends]->(:Class) edges from parsed_class_superclasses.csv.
LOAD CSV WITH HEADERS FROM ($file_path_root + $file_9) AS line
WITH line
CALL {
  WITH line
  MATCH (sub:Class { qualified_name: line.qualified_name })
  MATCH (sup:Class { qualified_name: line.superclass_qualified_name })
  // MERGE keeps the edge unique; the relationship variable was unused.
  MERGE (sub)-[:extends]->(sup)
} IN TRANSACTIONS OF 10000 ROWS;
When I try to run the query from inside a Neo4j docker container (v5.26.0):
CALL apoc.cypher.runFile("neo4j_importer_cypher_script_2024-12-19.cypher");
nothing happens, and I get the error shown in the attached image.
Why is this happening? I also have apoc-5.26.0-extended.jar
in the graph-db/plugins
directory.
This is the compose.yaml:
graphdb:
  container_name: mjolnir
  build:
    # Relative dockerfile path is resolved from the build context.
    dockerfile: docker/neo4j/Dockerfile
    context: .
  restart: always
  ports:
    - "7474:7474"   # HTTP / Browser
    - "7687:7687"   # Bolt
  environment:
    NEO4J_AUTH: ${NEO4J_AUTH}
    NEO4J_apoc_export_file_enabled: "true"
    NEO4J_apoc_import_file_enabled: "true"
    # With use_neo4j_config=true, APOC resolves file:/// URLs against the
    # server's configured import directory (server.directories.import).
    # NOTE(review): the volume below mounts at /import, but the official image
    # defaults the import directory elsewhere (/var/lib/neo4j/import) —
    # confirm the server is pointed at /import (e.g. set
    # NEO4J_server_directories_import: "/import") or mount the CSVs at the
    # default location instead, otherwise the loads will not find the files.
    NEO4J_apoc_import_file_use__neo4j__config: "true"
  volumes:
    - ./graph-db/import:/import
    - ./graph-db/plugins:/plugins