# readers/xml_reader.py
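"""Stream a large XML export with ElementTree.iterparse, print each matching
record as JSON, and append it as a row to a per-entity-TYPE CSV file."""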
#  from __future__ import absolute_import
import json
import csv
import parsers, factories
from entities import Person

# xml.etree.cElementTree was deprecated in Python 3.3 and removed in 3.9;
# plain ElementTree uses the C accelerator automatically when available.
import xml.etree.ElementTree as ET

def read_file(path, element_key):
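    """Iterate `path` with iterparse and export every record whose tag contains
    `element_key`.

    Each matching element is cleaned by parsers.INLXmlParser, converted into an
    entity by factories.INLFactory, printed as JSON, and appended to
    ../out/<TYPE>.csv (one CSV file per entity TYPE).
    """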
    record_counter = 0

    # stream the file with iterparse instead of loading the whole tree into memory
    context = iter(ET.iterparse(path, events=("start", "end")))

    # advance to the root element so processed records can be cleared as we go
    event, root = next(context)

    # the factory that builds entity objects from cleaned records
    inl_factory = factories.INLFactory()

    # one open file and one csv.DictWriter per entity TYPE
    files = {}
    writers = {}

    for event, element in context:
        if event == 'end':
            if element_key in element.tag:
                record_counter += 1

                # clean the raw record; the result is an ElementTree
                inl_parser = parsers.INLXmlParser(element)
                cleaned_element = inl_parser.clearxml()
                entity = inl_factory.get_entity(cleaned_element)

                # print the entity and append it to its per-TYPE CSV file
                if entity is not None:
                    if entity.TYPE not in files:
                        # newline='' keeps csv from inserting blank lines on Windows
                        files[entity.TYPE] = open("../out/{}.csv".format(entity.TYPE), 'w+',
                                                  encoding='utf8', newline='')
                        writers[entity.TYPE] = csv.DictWriter(files[entity.TYPE], entity.CSV_FIELDS)
                        writers[entity.TYPE].writeheader()
                    json_entity = entity.to_json()
                    print(json_entity)
                    writers[entity.TYPE].writerow(entity.to_csv_dict())
                    # json.dump(entity.comments_list, f667, indent=2, ensure_ascii=False)
                    # json.dump(entity.bio_data, f678, indent=2, ensure_ascii=False)

                    # entity.print_entity()

                # TODO: analyse and upload the entity

                # import pdb; pdb.set_trace()
                print(record_counter, cleaned_element.getroot().tag, '@@@', cleaned_element.getroot().attrib, '@@@',
                      cleaned_element.getroot().text)

                # free the processed record so memory stays bounded while streaming
                element.clear()
    # close every per-TYPE output file before returning
    for out_file in files.values():
        out_file.close()

    print(record_counter)


if __name__ == '__main__':
    read_file(r"../../NLI-nnl10.xml", 'record')