curiosity/baseline_reader.py [147:224]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def __init__(self,
                 tokenizer: Tokenizer = None,
                 mention_tokenizer: Tokenizer = None,
                 token_indexers: Dict[str, TokenIndexer] = None,
                 mention_indexers: Dict[str, TokenIndexer] = None):
        super().__init__()
        self._tokenizer = tokenizer or WordTokenizer()
        self._token_indexers = token_indexers or {
            'tokens': SingleIdTokenIndexer(lowercase_tokens=True),
        }
        self._mention_indexers = mention_indexers or {
            'mentions': SingleIdTokenIndexer(),
        }
        self._mention_tokenizer = mention_tokenizer or WordTokenizer(
            word_splitter=JustSpacesWordSplitter(),
        )
        self._fact_lookup: Optional[Dict[int, Fact]] = None

    @overrides
    def _read(self, file_path: str):
        """
        file_path should point to a curiosity dialog file. In addition,
        the directory that contains that file should also contain the
        sqlite database associated with the dialogs named as below
        - wiki_sql.sqlite.db

        The intent is that there are
        """
        with open(file_path) as f:
            dataset = json.load(f)
            dialogs = dataset['dialogs']

        directory = os.path.dirname(file_path)
        db_path = os.path.join(directory, 'wiki_sql.sqlite.db')
        engine, session = create_sql(db_path)
        facts = (
            session
            .query(Fact)
            .all()
        )
        # Map fact id -> Fact row for later lookup.
        self._fact_lookup = {f.id: f for f in facts}
        # Verify that the dialog file was built against this copy of the database.
        verify_checksum(dataset['db_checksum'], db_path)
        # store = CuriosityStore(db_path)
        # fact_lookup = store.get_fact_lookup()
        # TODO: Add in facts
        for d in dialogs:
            yield self.text_to_instance(d)

        session.close()

    @overrides
    def text_to_instance(self, dialog: Dict, ignore_fact: bool = False):
        # Per-message parallel lists (plus fact-label metadata), populated in
        # the loop over dialog['messages'] below.
        msg_texts = []
        msg_senders = []
        msg_likes = []
        msg_acts = []
        msg_act_mask = []
        msg_facts = []
        msg_fact_labels = []
        metadata_fact_labels = []
        if len(dialog['messages']) == 0:
            raise ValueError('There are no dialog messages')

        known_entities = [
            Token(text='ENTITY/' + t.replace(' ', '_'), idx=idx)
            for idx, t in enumerate(dialog['known_entities'])
        ]
        if len(known_entities) == 0:
            known_entities.append(Token(text='@@YOUKNOWNOTHING@@', idx=0))
        known_entities_field = TextField(known_entities, self._mention_indexers)

        focus_entity = dialog['focus_entity']
        focus_entity_field = TextField(
            [Token(text='ENTITY/' + focus_entity.replace(' ', '_'), idx=0)],
            self._mention_indexers
        )
        prev_msg = ''
        for msg in dialog['messages']:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
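Usage sketch for the _read contract above (not taken from the file): the reader expects a JSON dialog file containing 'dialogs' and 'db_checksum' keys, with wiki_sql.sqlite.db sitting in the same directory. The class name BaselineReader and the file names below are assumptions for illustration only; the excerpt starts at __init__, so the real class name is not visible here.

import os
# Hypothetical class name -- the excerpt above begins at __init__, so the real
# name of the reader class in curiosity/baseline_reader.py is not shown.
from curiosity.baseline_reader import BaselineReader

data_dir = 'dialog_data'  # assumed directory layout
dialog_file = os.path.join(data_dir, 'curiosity_dialogs.train.json')  # assumed file name

# _read() expects the sqlite database to live next to the dialog file.
assert os.path.exists(os.path.join(data_dir, 'wiki_sql.sqlite.db'))

reader = BaselineReader()                     # tokenizers/indexers fall back to the defaults in __init__
instances = list(reader._read(dialog_file))   # one instance per entry in dataset['dialogs']

If the enclosing class subclasses AllenNLP's DatasetReader (suggested by the @overrides on _read and text_to_instance), the public entry point would be reader.read(dialog_file) rather than calling _read directly.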



curiosity/baseline_reader.py [313:390]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def __init__(self,
                 tokenizer: Tokenizer = None,
                 mention_tokenizer: Tokenizer = None,
                 token_indexers: Dict[str, TokenIndexer] = None,
                 mention_indexers: Dict[str, TokenIndexer] = None):
        super().__init__()
        self._tokenizer = tokenizer or WordTokenizer()
        self._token_indexers = token_indexers or {
            'tokens': SingleIdTokenIndexer(lowercase_tokens=True),
        }
        self._mention_indexers = mention_indexers or {
            'mentions': SingleIdTokenIndexer(),
        }
        self._mention_tokenizer = mention_tokenizer or WordTokenizer(
            word_splitter=JustSpacesWordSplitter(),
        )
        self._fact_lookup: Optional[Dict[int, Fact]] = None

    @overrides
    def _read(self, file_path: str):
        """
        file_path should point to a curiosity dialog file. In addition,
        the directory that contains that file should also contain the
        sqlite database associated with the dialogs named as below
        - wiki_sql.sqlite.db

        The intent is that there are
        """
        with open(file_path) as f:
            dataset = json.load(f)
            dialogs = dataset['dialogs']

        directory = os.path.dirname(file_path)
        db_path = os.path.join(directory, 'wiki_sql.sqlite.db')
        engine, session = create_sql(db_path)
        facts = (
            session
            .query(Fact)
            .all()
        )
        # Map fact id -> Fact row for later lookup.
        self._fact_lookup = {f.id: f for f in facts}
        # Verify that the dialog file was built against this copy of the database.
        verify_checksum(dataset['db_checksum'], db_path)
        # store = CuriosityStore(db_path)
        # fact_lookup = store.get_fact_lookup()
        # TODO: Add in facts
        for d in dialogs:
            yield self.text_to_instance(d)

        session.close()

    @overrides
    def text_to_instance(self, dialog: Dict, ignore_fact: bool = False):
        # Per-message parallel lists (plus fact-label metadata), populated in
        # the loop over dialog['messages'] below.
        msg_texts = []
        msg_senders = []
        msg_likes = []
        msg_acts = []
        msg_act_mask = []
        msg_facts = []
        msg_fact_labels = []
        metadata_fact_labels = []
        if len(dialog['messages']) == 0:
            raise ValueError('There are no dialog messages')

        known_entities = [
            Token(text='ENTITY/' + t.replace(' ', '_'), idx=idx)
            for idx, t in enumerate(dialog['known_entities'])
        ]
        if len(known_entities) == 0:
            known_entities.append(Token(text='@@YOUKNOWNOTHING@@', idx=0))
        known_entities_field = TextField(known_entities, self._mention_indexers)

        focus_entity = dialog['focus_entity']
        focus_entity_field = TextField(
            [Token(text='ENTITY/' + focus_entity.replace(' ', '_'), idx=0)],
            self._mention_indexers
        )
        prev_msg = ''
        for msg in dialog['messages']:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
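The known-entity handling in text_to_instance is small enough to illustrate in isolation. A minimal sketch, assuming the AllenNLP Token class already used in the excerpt; the entity names are invented for the example:

from allennlp.data.tokenizers import Token

def known_entity_tokens(known_entities):
    # One token per known entity: spaces become underscores and an 'ENTITY/'
    # prefix is added, mirroring the list comprehension in text_to_instance.
    tokens = [
        Token(text='ENTITY/' + name.replace(' ', '_'), idx=idx)
        for idx, name in enumerate(known_entities)
    ]
    # An empty known-entities list is replaced by a single sentinel token.
    if not tokens:
        tokens.append(Token(text='@@YOUKNOWNOTHING@@', idx=0))
    return tokens

print([t.text for t in known_entity_tokens(['United States', 'Caribbean'])])
# ['ENTITY/United_States', 'ENTITY/Caribbean']
print([t.text for t in known_entity_tokens([])])
# ['@@YOUKNOWNOTHING@@']

The same 'ENTITY/' format is reused for focus_entity_field with the same self._mention_indexers, so the focus entity and the known entities share the 'mentions' vocabulary namespace.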



