Upload 2 files
- constants.py +37 -0
- make_origin_dataset.py +116 -0
constants.py
ADDED
@@ -0,0 +1,37 @@
+relations_map = {
+    'social_interaction': ['oEffect', 'oReact', 'oWant', 'xAttr', 'xEffect', 'xIntent', 'xNeed', 'xReact', 'xWant'],
+    'physical_entity'   : ['ObjectUse', 'AtLocation', 'Desires', 'NotDesires', 'HasProperty', 'MadeUpOf', 'CapableOf'],
+    'event_centered'    : ['isAfter', 'isBefore', 'HasSubEvent', 'HinderedBy', 'Causes', 'xReason', 'isFilledBy'],
+}
+
+social_interaction_2_descriptions = {
+    'xIntent': 'Why does X cause the event?',
+    'xNeed'  : 'What does X need to do before the event?',
+    'xAttr'  : 'How would X be described?',
+    'xEffect': 'What effects does the event have on X?',
+    'xWant'  : 'What would X likely want to do after the event?',
+    'xReact' : 'How does X feel after the event?',
+    'oEffect': 'What effects does the event have on others?',
+    'oReact' : 'How do others feel after the event?',
+    'oWant'  : 'What would others likely want to do after the event?',
+}
+
+physical_entity_2_descriptions = {
+    'ObjectUse'  : 'ObjectUse describes everyday affordances or uses of objects, and includes both typical and atypical uses.',
+    'AtLocation' : 'AtLocation is a spatial relation that describes the location in/on/at which an entity is likely to be found.',
+    'Desires'    : 'Desires are relations that deal with desires of sentient entities.',
+    'HasProperty': 'HasProperty usually describes entities\' general characteristics. In certain cases, the relation can also map to descriptors that speak to the substance or value of items.',
+    'NotDesires' : 'NotDesires are relations that deal with the absence of desires in sentient entities.',
+    'MadeUpOf'   : 'MadeUpOf describes a part, portion or makeup of an entity.',
+    'CapableOf'  : 'CapableOf describes abilities and capabilities of everyday living entities and natural entities that can exert a force. CapableOf includes general capabilities and specialized capabilities.',
+}
+
+event_centered_2_descriptions = {
+    'isAfter'    : 'isAfter introduces events that can follow an event.',
+    'HasSubEvent': 'HasSubEvent provides the internal structure of an event, each tail denoting a step within the larger head event.',
+    'isBefore'   : 'isBefore introduces events that can precede an event.',
+    'HinderedBy' : 'HinderedBy introduces hindrances that obstruct the natural path to the achievement of a goal.',
+    'Causes'     : 'Causes specifically captures the causal relation between two events or entities.',
+    'xReason'    : 'xReason provides a post-fact explanation of the cause of an event.',
+    'isFilledBy' : 'isFilledBy provides a filler phrase for an event with a blank that is sensical and commonly acceptable for the event.',
+}
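A quick sanity check can confirm that every relation listed in relations_map has a matching description; this is a minimal sketch, not part of the commit, and type_2_descriptions is a hypothetical helper rather than something defined in constants.py:

from constants import (event_centered_2_descriptions,
                       physical_entity_2_descriptions, relations_map,
                       social_interaction_2_descriptions)

# hypothetical lookup from knowledge type to its description dict
type_2_descriptions = {
    'social_interaction': social_interaction_2_descriptions,
    'physical_entity': physical_entity_2_descriptions,
    'event_centered': event_centered_2_descriptions,
}

for knowledge_type, relations in relations_map.items():
    # every relation in a group should have an entry in the matching description dict
    missing = [r for r in relations if r not in type_2_descriptions[knowledge_type]]
    assert not missing, f'missing descriptions for {knowledge_type}: {missing}'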
make_origin_dataset.py
ADDED
@@ -0,0 +1,116 @@
+import os
+from collections import Counter
+from typing import List
+
+import datasets
+import matplotlib.pyplot as plt
+import pandas as pd
+
+from constants import (event_centered_2_descriptions,
+                       physical_entity_2_descriptions, relations_map,
+                       social_interaction_2_descriptions)
+
+
+def show_bar(relation: List):
+    # plot how often each relation occurs in the split
+    c = dict(Counter(relation))
+
+    keys = list(c.keys())
+    values = list(c.values())
+    plt.bar(keys, values)
+    plt.xticks(rotation=25, fontsize=8)
+    plt.yticks(fontsize=8)
+
+    plt.xlabel('relations')
+    plt.ylabel('numbers')
+    plt.title('relations analysis')
+
+    for i in range(len(keys)):
+        plt.text(i, values[i] + 10, str(values[i]), ha='center', fontsize=10)
+
+    plt.show()
+
+def read_file(data_path: str):
+    # each row of the TSV is an (event, relation, tail) triple
+    df = pd.read_csv(data_path, sep='\t', header=None)
+
+    df.columns = ['event', 'relation', 'tail']
+    print(df.head())
+
+    event = df['event'].tolist()
+    relation = df['relation'].tolist()
+    tail = df['tail'].tolist()
+
+    return event, relation, tail
+
+def make_base_dataset(event: List[str], relation: List[str], tail: List[str]):
+    # group consecutive rows that share the same (event, relation) pair so that
+    # all of their tails are collected into a single list
+    new_event, new_relation, new_tail = [], [], []
+    knowledge_type = []
+    relation_description = []
+
+    # flatten relations_map once to validate incoming relations
+    all_relations = [r for group in relations_map.values() for r in group]
+
+    prev_event, prev_relation = None, None
+
+    for i in range(len(event)):
+        if i > 0 and event[i] == prev_event and relation[i] == prev_relation:
+            new_tail[-1].append(tail[i])
+        else:
+            new_event.append(event[i])
+            new_relation.append(relation[i])
+
+            # insert knowledge type and relation description
+            if relation[i] not in all_relations:
+                raise ValueError(f'no matching knowledge type found for relation {relation[i]}, please check it!')
+
+            for k, v in relations_map.items():
+                if relation[i] in v:
+                    knowledge_type.append(k)
+                    if k == 'social_interaction':
+                        relation_description.append(social_interaction_2_descriptions[relation[i]])
+                    elif k == 'physical_entity':
+                        relation_description.append(physical_entity_2_descriptions[relation[i]])
+                    elif k == 'event_centered':
+                        relation_description.append(event_centered_2_descriptions[relation[i]])
+                    else:
+                        raise KeyError(f'no description dict found for knowledge type {k}, please check it!')
+
+            new_tail.append([tail[i]])
+        prev_event, prev_relation = event[i], relation[i]
+
+    df = pd.DataFrame({
+        'knowledge_type': knowledge_type,
+        'event': new_event,
+        'relation': new_relation,
+        'relation_description': relation_description,
+        'tail': new_tail,
+    })
+
+    print(df.head())
+    return df
+
+def get_dataset(data_path: str):
+    event, relation, tail = read_file(data_path=data_path)
+    df = make_base_dataset(event=event, relation=relation, tail=tail)
+    dataset = datasets.Dataset.from_pandas(df, split='train')
+
+    print(dataset)
+    return dataset
+
+def upload_dataset(dataset, repo_id: str, access_token: str, private: bool):
+    dataset.push_to_hub(
+        repo_id=repo_id,
+        private=private,
+        token=access_token,
+    )
+
+if __name__ == '__main__':
+    train_dataset = get_dataset('./dataset/train.tsv')
+    valid_dataset = get_dataset('./dataset/dev.tsv')
+    test_dataset = get_dataset('./dataset/test.tsv')
+
+    dataset = datasets.DatasetDict({
+        'train': train_dataset,
+        'validation': valid_dataset,
+        'test': test_dataset,
+    })
+
+    print(dataset)
+
+    # read the Hub token from the environment rather than hard-coding a secret
+    upload_dataset(dataset, repo_id='Estwld/atomic2020-origin', private=False,
+                   access_token=os.environ['HF_TOKEN'])
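Once the push succeeds, the three splits can be pulled back from the Hub. A minimal sketch, assuming the repository stays public under the same repo_id:

import datasets

# load the uploaded DatasetDict (train/validation/test) from the Hub
dataset = datasets.load_dataset('Estwld/atomic2020-origin')

# each record carries knowledge_type, event, relation, relation_description, tail
print(dataset)
print(dataset['train'][0])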