text
stringlengths 1
93.6k
|
---|
for seg in list_seg:
|
if "JOIN" in seg:
|
total_join += 1
|
for i in range(total_join):
|
final_candi = final_candi + add_reverse_index(final_candi, i + 1)
|
return final_candi
|
def add_reverse_index(list_of_e, join_id):
    """Toggle the reverse marker on the join_id-th JOIN relation of each expression.

    For every s-expression string in *list_of_e*, this scans its
    space-separated tokens and counts JOIN operators whose following token
    is a relation. When the count reaches *join_id*:

    - a plain dotted relation ``rel.a.b`` is wrapped as ``(R rel.a.b)``;
    - an already-reversed ``(R rel.a.b)`` is unwrapped back to ``rel.a.b``.

    Expressions containing fewer than *join_id* such joins contribute
    nothing to the result.

    Args:
        list_of_e: list of s-expression strings.
        join_id: 1-based index of the JOIN occurrence to toggle.

    Returns:
        A new list with one rewritten expression per input expression that
        had a join_id-th JOIN.
    """
    rewritten = []
    for expression in list(list_of_e):
        tokens = expression.split(" ")
        seen_joins = 0
        for idx, token in enumerate(tokens):
            if "JOIN" not in token:
                continue
            follower = tokens[idx + 1]
            if "." in follower:
                # Plain relation after the JOIN: wrap it with the reverse
                # operator once we have reached the requested occurrence.
                seen_joins += 1
                if seen_joins == join_id:
                    tokens[idx + 1] = "(R " + follower + ")"
                    rewritten.append(" ".join(tokens))
                    break
            elif "(R" in follower:
                # Already-reversed relation: drop the "(R" token and strip
                # the matching ")" from the relation, then re-split/join to
                # collapse the doubled space left by the emptied token.
                seen_joins += 1
                if seen_joins == join_id:
                    tokens[idx + 1] = ""
                    tokens[idx + 2] = tokens[idx + 2][:-1]
                    rewritten.append(" ".join(" ".join(tokens).split()))
                    break
    return rewritten
|
def bound_to_existed(question, s_expression, found_mids, two_hop_rela_dict,
|
relationship_to_enti, hsearcher, rela_corpus, relationships):
|
possible_relationships_can = []
|
possible_relationships = []
|
# logger.info("before 2 hop rela")
|
updating_two_hop_rela_dict = two_hop_rela_dict.copy()
|
for mid in found_mids:
|
if mid in updating_two_hop_rela_dict:
|
relas = updating_two_hop_rela_dict[mid]
|
possible_relationships_can += list(set(relas[0]))
|
possible_relationships_can += list(set(relas[1]))
|
else:
|
relas = get_2hop_relations(mid)
|
updating_two_hop_rela_dict[mid] = relas
|
possible_relationships_can += list(set(relas[0]))
|
possible_relationships_can += list(set(relas[1]))
|
# logger.info("after 2 hop rela")
|
for rela in possible_relationships_can:
|
if not rela.startswith('common') and not rela.startswith('base') and not rela.startswith('type'):
|
possible_relationships.append(rela)
|
if not possible_relationships:
|
possible_relationships = relationships.copy()
|
expression_segment = s_expression.split(" ")
|
# print("possible_relationships: ", possible_relationships)
|
possible_relationships = list(set(possible_relationships))
|
relationship_replace_dict = {}
|
lemma_tags = {"NNS", "NNPS"}
|
for i, seg in enumerate(expression_segment):
|
processed_seg = seg.strip(')')
|
if '.' in seg and not seg.startswith('m.') and not seg.startswith('g.') and not (
|
expression_segment[i - 1].endswith("AND") or expression_segment[i - 1].endswith("COUNT") or
|
expression_segment[i - 1].endswith("MAX") or expression_segment[i - 1].endswith("MIN")) and (
|
not any(ele.isupper() for ele in seg)):
|
tokenized_query = re.split('\.|_', processed_seg)
|
tokenized_query = " ".join(tokenized_query)
|
tokenized_question = question.strip(' ?')
|
tokenized_query = tokenized_query + ' ' + tokenized_question
|
searched_results = hsearcher.search(tokenized_query, k=1000)
|
top3_ques = []
|
for hit in searched_results:
|
if len(top3_ques) > 7:
|
break
|
cur_result = json.loads(rela_corpus.doc(str(hit.docid)).raw())
|
cur_rela = cur_result['rel_ori']
|
if not cur_rela.startswith("base.") and not cur_rela.startswith("common.") and \
|
not cur_rela.endswith("_inv.") and len(cur_rela.split('.')) > 2 and \
|
cur_rela in possible_relationships:
|
top3_ques.append(cur_rela)
|
logger.info("top3_ques rela: {}".format(top3_ques))
|
relationship_replace_dict[i] = top3_ques[:7]
|
if len(relationship_replace_dict) > 5:
|
return None, updating_two_hop_rela_dict, None
|
elif len(relationship_replace_dict) >= 3:
|
for key in relationship_replace_dict:
|
relationship_replace_dict[key] = relationship_replace_dict[key][:4]
|
combinations = list(relationship_replace_dict.values())
|
all_iters = list(itertools.product(*combinations))
|
rela_index = list(relationship_replace_dict.keys())
|
# logger.info("all_iters: {}".format(all_iters))
|
for iters in all_iters:
|
expression_segment_copy = expression_segment.copy()
|
possible_entities_set = []
|
for i in range(len(iters)):
|
suffix = ""
|
for k in range(len(expression_segment[rela_index[i]].split(')')) - 1):
|
suffix = suffix + ')'
|
expression_segment_copy[rela_index[i]] = iters[i] + suffix
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.