Hi, I set up the code below to illustrate my question: is there a way to define the relationship type using a DataFrame column in Spark?
thanks!
-bob
import pandas as pd

_list = []

# first row: ROLE1 gets DML on Database1
_dict = {}
_dict['ENV'] = "DEV"
_dict['PRIVILEGE'] = "DML"
_dict['ROLE'] = "ROLE1"
_dict['DATABASE'] = "Database1"
_list.append(_dict)

# second row: note the fresh dict -- reusing the one above would
# mutate the first row in place and append a duplicate reference
_dict = {}
_dict['ENV'] = "DEV"
_dict['PRIVILEGE'] = "DDL"
_dict['ROLE'] = "ROLE2"
_dict['DATABASE'] = "Database1"
_list.append(_dict)
df = pd.DataFrame(_list)
df = spark.createDataFrame(df)  # assumes an active SparkSession named spark
# connection options (url, authentication.*) omitted for brevity;
# "relationship" is where I'd like to plug in the PRIVILEGE column,
# but the option appears to expect a literal string type name
(df.write.format("org.neo4j.spark.DataSource")
    .mode("overwrite")
    .option("relationship", df["PRIVILEGE"])
    .option("relationship.save.strategy", "keys")
    .option("relationship.target.labels", "DATABASE")
    .option("relationship.target.node.keys", "DATABASE,ENV")
    .option("relationship.target.save.mode", "overwrite")
    .option("relationship.source.labels", "ROLE")
    .option("relationship.source.save.mode", "overwrite")
    .option("relationship.source.node.keys", "ROLE,ENV")
    .save())
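
For context, the workaround I've been considering is one write per distinct PRIVILEGE value, roughly sketched below (untested, and it reuses the same options as above); I'd rather do it in a single write if the connector supports it:

# rough sketch, untested: collect the distinct PRIVILEGE values and
# pass each one as the literal relationship type for that batch
privileges = [row["PRIVILEGE"] for row in df.select("PRIVILEGE").distinct().collect()]
for priv in privileges:
    (df.filter(df["PRIVILEGE"] == priv)
        .write.format("org.neo4j.spark.DataSource")
        .mode("overwrite")
        .option("relationship", priv)  # literal type name per batch
        .option("relationship.save.strategy", "keys")
        .option("relationship.target.labels", "DATABASE")
        .option("relationship.target.node.keys", "DATABASE,ENV")
        .option("relationship.target.save.mode", "overwrite")
        .option("relationship.source.labels", "ROLE")
        .option("relationship.source.save.mode", "overwrite")
        .option("relationship.source.node.keys", "ROLE,ENV")
        .save())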