diff --git a/lib/ontologies_linked_data/mappings/mappings.rb b/lib/ontologies_linked_data/mappings/mappings.rb index 5da7e261..f2dddc53 100644 --- a/lib/ontologies_linked_data/mappings/mappings.rb +++ b/lib/ontologies_linked_data/mappings/mappings.rb @@ -5,21 +5,32 @@ module LinkedData module Mappings OUTSTANDING_LIMIT = 30 - def self.mapping_predicates() - predicates = {} - predicates["CUI"] = ["http://bioportal.bioontology.org/ontologies/umls/cui"] - predicates["SAME_URI"] = - ["http://data.bioontology.org/metadata/def/mappingSameURI"] - predicates["LOOM"] = - ["http://data.bioontology.org/metadata/def/mappingLoom"] - predicates["REST"] = - ["http://data.bioontology.org/metadata/def/mappingRest"] - return predicates - end + def self.mapping_predicates + predicates = {} + predicates["CUI"] = ["http://bioportal.bioontology.org/ontologies/umls/cui"] + predicates["SAME_URI"] = + ["http://data.bioontology.org/metadata/def/mappingSameURI"] + predicates["LOOM"] = + ["http://data.bioontology.org/metadata/def/mappingLoom"] + predicates["REST"] = + ["http://data.bioontology.org/metadata/def/mappingRest"] + return predicates + end - def self.handle_triple_store_downtime(logger=nil) - epr = Goo.sparql_query_client(:main) - status = epr.status + def self.internal_mapping_predicates + predicates = {} + predicates["SKOS:EXACT_MATCH"] = ["http://www.w3.org/2004/02/skos/core#exactMatch"] + predicates["SKOS:CLOSE_MATCH"] = ["http://www.w3.org/2004/02/skos/core#closeMatch"] + predicates["SKOS:BROAD_MATCH"] = ["http://www.w3.org/2004/02/skos/core#broadMatch"] + predicates["SKOS:NARROW_MATCH"] = ["http://www.w3.org/2004/02/skos/core#narrowMatch"] + predicates["SKOS:RELATED_MATCH"] = ["http://www.w3.org/2004/02/skos/core#relatedMatch"] + + return predicates + end + + def self.handle_triple_store_downtime(logger = nil) + epr = Goo.sparql_query_client(:main) + status = epr.status if status[:exception] logger.info(status[:exception]) if logger @@ -145,142 +156,59 @@ def 
self.empty_page(page,size) return p end - def self.mappings_ontologies(sub1,sub2,page,size,classId=nil,reload_cache=false) - union_template = <<-eos -{ - GRAPH <#{sub1.id.to_s}> { - classId ?o . - } - GRAPH graph { - ?s2 ?o . - } - bind -} -eos - blocks = [] - mappings = [] - persistent_count = 0 - acr1 = sub1.id.to_s.split("/")[-3] - - if classId.nil? - acr2 = nil - acr2 = sub2.id.to_s.split("/")[-3] unless sub2.nil? - pcount = LinkedData::Models::MappingCount.where(ontologies: acr1) - pcount = pcount.and(ontologies: acr2) unless acr2.nil? - f = Goo::Filter.new(:pair_count) == (not acr2.nil?) - pcount = pcount.filter(f) - pcount = pcount.include(:count) - pcount_arr = pcount.all - persistent_count = pcount_arr.length == 0 ? 0 : pcount_arr.first.count + def self.mappings_ontologies(sub1, sub2, page, size, classId = nil, reload_cache = false) + sub1, acr1 = extract_acronym(sub1) + sub2, acr2 = extract_acronym(sub2) - return LinkedData::Mappings.empty_page(page,size) if persistent_count == 0 - end + mappings = [] + persistent_count = 0 - if classId.nil? - union_template = union_template.gsub("classId", "?s1") - else - union_template = union_template.gsub("classId", "<#{classId.to_s}>") - end - # latest_sub_ids = self.retrieve_latest_submission_ids - - mapping_predicates().each do |_source,mapping_predicate| - union_block = union_template.gsub("predicate", mapping_predicate[0]) - union_block = union_block.gsub("bind","BIND ('#{_source}' AS ?source)") - - if sub2.nil? - union_block = union_block.gsub("graph","?g") - else - union_block = union_block.gsub("graph","<#{sub2.id.to_s}>") + if classId.nil? 
+ persistent_count = count_mappings(acr1, acr2) + return LinkedData::Mappings.empty_page(page, size) if persistent_count == 0 end - blocks << union_block - end - unions = blocks.join("\nUNION\n") - - mappings_in_ontology = <<-eos -SELECT DISTINCT query_variables -WHERE { -unions -filter -} page_group -eos - query = mappings_in_ontology.gsub("unions", unions) - variables = "?s2 graph ?source ?o" - variables = "?s1 " + variables if classId.nil? - query = query.gsub("query_variables", variables) - filter = classId.nil? ? "FILTER ((?s1 != ?s2) || (?source = 'SAME_URI'))" : '' - if sub2.nil? - query = query.gsub("graph","?g") - ont_id = sub1.id.to_s.split("/")[0..-3].join("/") + query = mappings_ont_build_query(classId, page, size, sub1, sub2) + epr = Goo.sparql_query_client(:main) + graphs = [sub1] + unless sub2.nil? + graphs << sub2 + end + solutions = epr.query(query, graphs: graphs, reload_cache: reload_cache) + s1 = nil + s1 = RDF::URI.new(classId.to_s) unless classId.nil? + + solutions.each do |sol| + graph2 = sub2.nil? ? sol[:g] : sub2 + s1 = sol[:s1] if classId.nil? + backup_mapping = nil + + if sol[:source].to_s == "REST" + backup_mapping = LinkedData::Models::RestBackupMapping + .find(sol[:o]).include(:process, :class_urns).first + backup_mapping.process.bring_remaining + end - # latest_sub_filter_arr = latest_sub_ids.map { |_, id| "?g = <#{id}>" } - # filter += "\nFILTER (#{latest_sub_filter_arr.join(' || ')}) " + classes = get_mapping_classes_instance(s1, sub1, sol[:s2], graph2) - #STRSTARTS is used to not count older graphs - #no need since now we delete older graphs - filter += "\nFILTER (!STRSTARTS(str(?g),'#{ont_id}'))" - else - query = query.gsub("graph", "") - end - query = query.gsub("filter", filter) + mapping = if backup_mapping.nil? 
+ LinkedData::Models::Mapping.new(classes, sol[:source].to_s) + else + LinkedData::Models::Mapping.new( + classes, sol[:source].to_s, + backup_mapping.process, backup_mapping.id) + end - if size > 0 - pagination = "OFFSET offset LIMIT limit" - query = query.gsub("page_group",pagination) - limit = size - offset = (page-1) * size - query = query.gsub("limit", "#{limit}").gsub("offset", "#{offset}") - else - query = query.gsub("page_group","") - end - epr = Goo.sparql_query_client(:main) - graphs = [sub1.id] - unless sub2.nil? - graphs << sub2.id - end - solutions = epr.query(query, graphs: graphs, reload_cache: reload_cache) - s1 = nil - unless classId.nil? - s1 = RDF::URI.new(classId.to_s) - end - solutions.each do |sol| - graph2 = nil - if sub2.nil? - graph2 = sol[:g] - else - graph2 = sub2.id - end - if classId.nil? - s1 = sol[:s1] + mappings << mapping end - classes = [ read_only_class(s1.to_s,sub1.id.to_s), - read_only_class(sol[:s2].to_s,graph2.to_s) ] - - backup_mapping = nil - mapping = nil - if sol[:source].to_s == "REST" - backup_mapping = LinkedData::Models::RestBackupMapping - .find(sol[:o]).include(:process).first - backup_mapping.process.bring_remaining - end - if backup_mapping.nil? - mapping = LinkedData::Models::Mapping.new( - classes,sol[:source].to_s) - else - mapping = LinkedData::Models::Mapping.new( - classes,sol[:source].to_s, - backup_mapping.process,backup_mapping.id) + + if size == 0 + return mappings end - mappings << mapping - end - if size == 0 - return mappings + page = Goo::Base::Page.new(page, size, persistent_count, mappings) + return page end - page = Goo::Base::Page.new(page,size,nil,mappings) - page.aggregate = persistent_count - return page - end def self.mappings_ontology(sub,page,size,classId=nil,reload_cache=false) return self.mappings_ontologies(sub,nil,page,size,classId=classId, @@ -380,7 +308,7 @@ def self.get_rest_mapping(mapping_id) GRAPH ?s2 { ?c2 <#{rest_predicate}> ?uuid . 
} -FILTER(?uuid = <#{mapping_id}>) +FILTER(?uuid = <#{LinkedData::Models::Base.replace_url_prefix_to_id(mapping_id)}>) FILTER(?s1 != ?s2) } LIMIT 1 eos @@ -437,7 +365,7 @@ def self.create_rest_mapping(classes,process) graph_insert << [c.id, RDF::URI.new(rest_predicate), backup_mapping.id] Goo.sparql_update_client.insert_data(graph_insert, graph: sub.id) end - mapping = LinkedData::Models::Mapping.new(classes,"REST",process) + mapping = LinkedData::Models::Mapping.new(classes,"REST", process, backup_mapping.id) return mapping end @@ -773,5 +701,115 @@ def self.create_mapping_count_pairs_for_ontologies(logger, arr_acronyms) # fsave.close end + private + + def self.get_mapping_classes_instance(s1, graph1, s2, graph2) + [read_only_class(s1.to_s, graph1.to_s), + read_only_class(s2.to_s, graph2.to_s)] + end + + def self.mappings_ont_build_query(class_id, page, size, sub1, sub2) + blocks = [] + mapping_predicates.each do |_source, mapping_predicate| + blocks << mappings_union_template(class_id, sub1, sub2, + mapping_predicate[0], + "BIND ('#{_source}' AS ?source)") + end + + + + + + + filter = class_id.nil? ? "FILTER ((?s1 != ?s2) || (?source = 'SAME_URI'))" : '' + if sub2.nil? + + class_id_subject = class_id.nil? ? '?s1' : "<#{class_id.to_s}>" + source_graph = sub1.nil? ? '?g' : "<#{sub1.to_s}>" + internal_mapping_predicates.each do |_source, predicate| + blocks << <<-eos + { + GRAPH #{source_graph} { + #{class_id_subject} <#{predicate[0]}> ?s2 . + } + BIND(#{source_graph} AS ?g) + BIND(?s2 AS ?o) + BIND ('#{_source}' AS ?source) + } + eos + end + + ont_id = sub1.to_s.split("/")[0..-3].join("/") + #STRSTARTS is used to not count older graphs + #no need since now we delete older graphs + + filter += "\nFILTER (!STRSTARTS(str(?g),'#{ont_id}')" + filter += " || " + internal_mapping_predicates.keys.map{|x| "(?source = '#{x}')"}.join('||') + filter += ")" + end + + variables = "?s2 #{sub2.nil? ? '?g' : ''} ?source ?o" + variables = "?s1 " + variables if class_id.nil? 
+ + pagination = '' + if size > 0 + limit = size + offset = (page - 1) * size + pagination = "OFFSET #{offset} LIMIT #{limit}" + end + + query = <<-eos +SELECT DISTINCT #{variables} +WHERE { + #{blocks.join("\nUNION\n")} + #{filter} +} #{pagination} + eos + + query + end + + def self.mappings_union_template(class_id, sub1, sub2, predicate, bind) + class_id_subject = class_id.nil? ? '?s1' : "<#{class_id.to_s}>" + target_graph = sub2.nil? ? '?g' : "<#{sub2.to_s}>" + union_template = <<-eos +{ + GRAPH <#{sub1.to_s}> { + #{class_id_subject} <#{predicate}> ?o . + } + GRAPH #{target_graph} { + ?s2 <#{predicate}> ?o . + } + #{bind} +} + eos + end + + def self.count_mappings(acr1, acr2) + count = LinkedData::Models::MappingCount.where(ontologies: acr1) + count = count.and(ontologies: acr2) unless acr2.nil? + f = Goo::Filter.new(:pair_count) == (not acr2.nil?) + count = count.filter(f) + count = count.include(:count) + pcount_arr = count.all + pcount_arr.length == 0 ? 0 : pcount_arr.first.count + end + + def self.extract_acronym(submission) + sub = submission + if submission.nil? 
+ acr = nil + elsif submission.respond_to?(:id) + # Case where sub2 is a Submission + sub = submission.id + acr = sub.to_s.split("/")[-3] + else + acr = sub.to_s + end + + return sub, acr + end + + end end -end + diff --git a/lib/ontologies_linked_data/models/base.rb b/lib/ontologies_linked_data/models/base.rb index 7742329d..6f58e736 100644 --- a/lib/ontologies_linked_data/models/base.rb +++ b/lib/ontologies_linked_data/models/base.rb @@ -27,12 +27,10 @@ def delete(*args) # Override find method to make sure the id matches what is in the RDF store # Only do this if the setting is enabled, string comparison sucks def self.find(id, *options) - if LinkedData.settings.replace_url_prefix && id.to_s.start_with?(LinkedData.settings.rest_url_prefix) - id = RDF::IRI.new(id.to_s.sub(LinkedData.settings.rest_url_prefix, LinkedData.settings.id_url_prefix)) - end + id = replace_url_prefix_to_id(id) # Handle `+` to ` ` conversion here because Sinatra doesn't do it for URI's - id = id.gsub("+", " ") unless id.start_with?("http") + id = id.gsub('+', ' ') unless id.start_with?('http') super(id, *options) end @@ -137,8 +135,33 @@ def self.goo_aggregates_to_load(attributes = []) included_aggregates end + def self.replace_url_prefix_to_id(id) + if replace_url_prefix?(id) + id = RDF::IRI.new(id.to_s.sub(LinkedData.settings.rest_url_prefix, LinkedData.settings.id_url_prefix)) + end + id + end + + def self.replace_url_id_to_prefix(id) + if replace_url_id?(id) + id.to_s.gsub(LinkedData.settings.id_url_prefix, LinkedData.settings.rest_url_prefix) + else + id + end + end + + def self.replace_url_prefix?(id) + LinkedData.settings.replace_url_prefix && id.to_s.start_with?(LinkedData.settings.rest_url_prefix) + end + + def self.replace_url_id?(id) + LinkedData.settings.replace_url_prefix && id.to_s.start_with?(LinkedData.settings.id_url_prefix) + end + private + + ## # Looks for an object 'owner' and looks in Thread.current[:remote_user] # to see if the user for this request matches the owner 
diff --git a/lib/ontologies_linked_data/monkeypatches/object.rb b/lib/ontologies_linked_data/monkeypatches/object.rb index 067e1c83..deadf71c 100644 --- a/lib/ontologies_linked_data/monkeypatches/object.rb +++ b/lib/ontologies_linked_data/monkeypatches/object.rb @@ -155,16 +155,10 @@ def convert_nonstandard_types(value, options, &block) ## # If the config option is set, turn http://data.bioontology.org urls into the configured REST url def convert_url_prefix(value) - if LinkedData.settings.replace_url_prefix - if value.is_a?(String) && value.start_with?(LinkedData.settings.id_url_prefix) - value = value.sub(LinkedData.settings.id_url_prefix, LinkedData.settings.rest_url_prefix) - end - - if (value.is_a?(Array) || value.is_a?(Set)) && value.first.is_a?(String) && value.first.start_with?(LinkedData.settings.id_url_prefix) - value = value.map {|v| v.sub(LinkedData.settings.id_url_prefix, LinkedData.settings.rest_url_prefix)} - end + tmp = Array(value).map do |val| + LinkedData::Models::Base.replace_url_id_to_prefix(val) end - value + value.is_a?(Array) || value.is_a?(Set) ? tmp : tmp.first end ## diff --git a/lib/ontologies_linked_data/serializers/json.rb b/lib/ontologies_linked_data/serializers/json.rb index bb8d9b6c..7d8c5034 100644 --- a/lib/ontologies_linked_data/serializers/json.rb +++ b/lib/ontologies_linked_data/serializers/json.rb @@ -11,8 +11,8 @@ def self.serialize(obj, options = {}) # Add the id to json-ld attribute if current_cls.ancestors.include?(LinkedData::Hypermedia::Resource) && !current_cls.embedded? && hashed_obj.respond_to?(:id) - prefixed_id = LinkedData.settings.replace_url_prefix ? 
hashed_obj.id.to_s.gsub(LinkedData.settings.id_url_prefix, LinkedData.settings.rest_url_prefix) : hashed_obj.id.to_s - hash["@id"] = prefixed_id + prefixed_id = LinkedData::Models::Base.replace_url_id_to_prefix(hashed_obj.id) + hash["@id"] = prefixed_id.to_s end # Add the type hash["@type"] = current_cls.type_uri.to_s if hash["@id"] && current_cls.respond_to?(:type_uri) diff --git a/lib/ontologies_linked_data/serializers/xml.rb b/lib/ontologies_linked_data/serializers/xml.rb index 575158eb..a694ee0a 100644 --- a/lib/ontologies_linked_data/serializers/xml.rb +++ b/lib/ontologies_linked_data/serializers/xml.rb @@ -9,8 +9,8 @@ def self.serialize(obj, options) current_cls = hashed_obj.respond_to?(:klass) ? hashed_obj.klass : hashed_obj.class # Add the id and type if current_cls.ancestors.include?(LinkedData::Hypermedia::Resource) && !current_cls.embedded? - prefixed_id = LinkedData.settings.replace_url_prefix ? hashed_obj.id.to_s.gsub(LinkedData.settings.id_url_prefix, LinkedData.settings.rest_url_prefix) : hashed_obj.id.to_s - hash["id"] = prefixed_id + prefixed_id = LinkedData::Models::Base.replace_url_id_to_prefix(hashed_obj.id) + hash["id"] = prefixed_id.to_s hash["type"] = current_cls.type_uri.to_s end diff --git a/test/models/test_mappings.rb b/test/models/test_mappings.rb index 0bf1aff0..a9f94192 100644 --- a/test/models/test_mappings.rb +++ b/test/models/test_mappings.rb @@ -40,6 +40,11 @@ def self.ontologies_parse() LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new)) end + def delete_all_rest_mappings + LinkedData::Models::RestBackupMapping.all.each do |m| + LinkedData::Mappings.delete_rest_mapping(m.id) + end + end def test_mapping_count_models LinkedData::Models::MappingCount.where.all do |x| x.delete @@ -107,9 +112,7 @@ def validate_mapping(map) end def test_mappings_ontology - LinkedData::Models::RestBackupMapping.all.each do |m| - LinkedData::Mappings.delete_rest_mapping(m.id) - end + delete_all_rest_mappings 
LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new)) assert LinkedData::Models::MappingCount.where.all.length > 2 #bro @@ -226,40 +229,21 @@ def test_mappings_two_ontologies end def test_mappings_rest - mapping_term_a = ["http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Image_Algorithm", - "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Image", - "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Integration_and_Interoperability_Tools" ] - submissions_a = [ -"http://data.bioontology.org/ontologies/MAPPING_TEST1/submissions/latest", -"http://data.bioontology.org/ontologies/MAPPING_TEST1/submissions/latest", -"http://data.bioontology.org/ontologies/MAPPING_TEST1/submissions/latest" ] - mapping_term_b = ["http://purl.org/incf/ontology/Computational_Neurosciences/cno_alpha.owl#cno_0000202", - "http://purl.org/incf/ontology/Computational_Neurosciences/cno_alpha.owl#cno_0000203", - "http://purl.org/incf/ontology/Computational_Neurosciences/cno_alpha.owl#cno_0000205" ] - submissions_b = [ -"http://data.bioontology.org/ontologies/MAPPING_TEST2/submissions/latest", -"http://data.bioontology.org/ontologies/MAPPING_TEST2/submissions/latest", -"http://data.bioontology.org/ontologies/MAPPING_TEST2/submissions/latest" ] - relations = [ "http://www.w3.org/2004/02/skos/core#exactMatch", - "http://www.w3.org/2004/02/skos/core#closeMatch", - "http://www.w3.org/2004/02/skos/core#relatedMatch" ] - user = LinkedData::Models::User.where.include(:username).all[0] - assert user != nil + delete_all_rest_mappings + mapping_term_a, mapping_term_b, submissions_a, submissions_b, relations, user = rest_mapping_data + mappings_created = [] 3.times do |i| - process = LinkedData::Models::MappingProcess.new - process.name = "proc#{i}" - process.relation = RDF::URI.new(relations[i]) - process.creator= user - process.save - classes = [] - classes << LinkedData::Mappings.read_only_class( - mapping_term_a[i], submissions_a[i]) - 
classes << LinkedData::Mappings.read_only_class( - mapping_term_b[i], submissions_b[i]) - mappings_created << LinkedData::Mappings.create_rest_mapping(classes, process) + classes = get_mapping_classes(term_a:mapping_term_a[i], term_b: mapping_term_b[i], + submissions_a: submissions_a[i], submissions_b: submissions_b[i]) + + mappings_created << create_rest_mapping(relation: RDF::URI.new(relations[i]), + user: user, + classes: classes, + name: "proc#{i}") end + ont_id = submissions_a.first.split("/")[0..-3].join("/") latest_sub = LinkedData::Models::Ontology.find(RDF::URI.new(ont_id)).first.latest_submission LinkedData::Mappings.create_mapping_counts(Logger.new(TestLogFile.new)) @@ -308,4 +292,78 @@ def test_mappings_rest end assert_equal 3, rest_mapping_count end + + def test_get_rest_mapping + mapping_term_a, mapping_term_b, submissions_a, submissions_b, relations, user = rest_mapping_data + + classes = get_mapping_classes(term_a:mapping_term_a[0], term_b: mapping_term_b[0], + submissions_a: submissions_a[0], submissions_b: submissions_b[0]) + + mappings_created = [] + mappings_created << create_rest_mapping(relation: RDF::URI.new(relations[0]), + user: user, + classes: classes, + name: "proc#{0}") + + assert_equal 1, mappings_created.size + created_mapping_id = mappings_created.first.id + + refute_nil LinkedData::Mappings.get_rest_mapping(created_mapping_id) + + old_replace = LinkedData.settings.replace_url_prefix + LinkedData.settings.replace_url_prefix = true + + old_rest_url = LinkedData.settings.rest_url_prefix + LinkedData.settings.rest_url_prefix = 'data.test.org' + + refute_nil LinkedData::Mappings.get_rest_mapping(LinkedData::Models::Base.replace_url_id_to_prefix(created_mapping_id)) + + LinkedData.settings.rest_url_prefix = old_rest_url + LinkedData.settings.replace_url_prefix = old_replace + end + + private + + def get_mapping_classes(term_a:, term_b:, submissions_a:, submissions_b:) + classes = [] + classes << LinkedData::Mappings.read_only_class( + 
term_a, submissions_a) + classes << LinkedData::Mappings.read_only_class( + term_b, submissions_b) + classes + end + + def rest_mapping_data + mapping_term_a = ["http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Image_Algorithm", + "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Image", + "http://bioontology.org/ontologies/BiomedicalResourceOntology.owl#Integration_and_Interoperability_Tools" ] + submissions_a = [ + "http://data.bioontology.org/ontologies/MAPPING_TEST1/submissions/latest", + "http://data.bioontology.org/ontologies/MAPPING_TEST1/submissions/latest", + "http://data.bioontology.org/ontologies/MAPPING_TEST1/submissions/latest" ] + mapping_term_b = ["http://purl.org/incf/ontology/Computational_Neurosciences/cno_alpha.owl#cno_0000202", + "http://purl.org/incf/ontology/Computational_Neurosciences/cno_alpha.owl#cno_0000203", + "http://purl.org/incf/ontology/Computational_Neurosciences/cno_alpha.owl#cno_0000205" ] + submissions_b = [ + "http://data.bioontology.org/ontologies/MAPPING_TEST2/submissions/latest", + "http://data.bioontology.org/ontologies/MAPPING_TEST2/submissions/latest", + "http://data.bioontology.org/ontologies/MAPPING_TEST2/submissions/latest" ] + relations = [ "http://www.w3.org/2004/02/skos/core#exactMatch", + "http://www.w3.org/2004/02/skos/core#closeMatch", + "http://www.w3.org/2004/02/skos/core#relatedMatch" ] + + user = LinkedData::Models::User.where.include(:username).all[0] + assert user != nil + + [mapping_term_a, mapping_term_b, submissions_a, submissions_b, relations, user] + end + + def create_rest_mapping(relation:, user:, name:, classes:) + process = LinkedData::Models::MappingProcess.new + process.name = name + process.relation = relation + process.creator = user + process.save + LinkedData::Mappings.create_rest_mapping(classes, process) + end end