-
Notifications
You must be signed in to change notification settings - Fork 18
Expand file tree
/
Copy path__init__.py
More file actions
executable file
·1548 lines (1371 loc) · 58.4 KB
/
__init__.py
File metadata and controls
executable file
·1548 lines (1371 loc) · 58.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
"""
Greynir: Natural language processing for Icelandic
Queries module
Copyright (C) 2023 Miðeind ehf.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see http://www.gnu.org/licenses/.
This module implements a query processor that operates on queries
in the form of parse trees and returns the results requested,
if the query is valid and understood.
"""
from typing import (
ChainMap as ChainMapType,
DefaultDict,
Optional,
Sequence,
Set,
Tuple,
List,
Dict,
Callable,
Iterator,
Iterable,
Union,
Any,
Mapping,
cast,
)
from typing_extensions import Protocol, Literal
from types import FunctionType, ModuleType
import importlib
import logging
from datetime import datetime, timedelta, timezone
import json
import re
import random
from collections import defaultdict, ChainMap
from tokenizer import BIN_Tuple, detokenize
from reynir import TOK, Tok, tokenize
from reynir.fastparser import (
Fast_Parser,
ParseForestDumper,
ParseError,
ffi, # type: ignore
)
from reynir.binparser import BIN_Grammar, BIN_Token
from reynir.reducer import Reducer
from reynir.bindb import GreynirBin
from reynir.grammar import GrammarError
from islenska.bindb import BinFilterFunc
from settings import Settings
from queries.util import read_grammar_file
from db import SessionContext, Session, desc
from db.models import Query as QueryRow, QueryClientData, QueryLog
from tree import ProcEnv, Tree, TreeStateDict, Node
# from nertokenizer import recognize_entities
from images import get_image_url
from utility import QUERIES_DIR, modules_in_dir, QUERIES_UTIL_DIR
from geo import LatLonTuple
# Query response
ResponseDict = Dict[str, Any]
ResponseMapping = Mapping[str, Any]
# A response is either a single dict or a list of such dicts
ResponseType = Union[ResponseDict, List[ResponseDict]]
# Query context: a JSON-representable dict carried between
# successive queries from the same client
ContextDict = Dict[str, Any]
# Client data: JSON-representable data associated with a client id
ClientDataDict = Dict[str, Union[str, int, float, bool, Dict[str, str]]]
# Answer tuple (corresponds to parameter list of Query.set_answer())
AnswerTuple = Tuple[ResponseType, str, Optional[str]]
# Word lookup function: takes a word string, returns a string and a list of
# BIN tuples — NOTE(review): presumably (search key, meanings); confirm with callers
LookupFunc = Callable[[str], Tuple[str, List[BIN_Tuple]]]
# Function producing a help text from a lemma string
HelpFunc = Callable[[str], str]
class QueryStateDict(TreeStateDict):
    """Tree-processing state dict as passed to nonterminal handlers,
    extended with query-specific fields."""

    # The Query instance currently being processed
    query: "Query"
    # String-to-string name mapping accumulated during processing
    # NOTE(review): exact key/value semantics not visible in this module — confirm
    names: Dict[str, str]
class CastFunc(Protocol):
    """Structural type of a word-casting function: takes a word and an
    optional keyword-only BIN filter function, returning a string."""

    def __call__(self, w: str, *, filter_func: Optional[BinFilterFunc] = None) -> str:
        ...
# The grammar root nonterminal for queries; see Greynir.grammar in GreynirEngine
QUERY_GRAMMAR_ROOT = "QueryRoot"

# A fixed preamble that is inserted before the concatenated query grammar fragments
_QUERY_ROOT_GRAMMAR = read_grammar_file("root")

# Query prefixes that we cut off before further processing
# The 'bæjarblað'/'hæðarblað' below is a common misunderstanding by the Google ASR
# (it is returned when the user actually said 'hæ embla')
_IGNORED_QUERY_PREFIXES = (
    "embla",
    "hæ embla",
    "hey embla",
    "sæl embla",
    "bæjarblað",
    "hæðarblað",
)
# Compiled regex matching any of the ignored prefixes, case-insensitively,
# at the start of the query, along with trailing whitespace
_IGNORED_PREFIX_RE = re.compile(
    r"^({0})\s*".format("|".join(_IGNORED_QUERY_PREFIXES)), flags=re.IGNORECASE
)

# Auto-capitalization corrections, applied to the detokenized query string
_CAPITALIZATION_REPLACEMENTS = (("í Dag", "í dag"),)
def _now() -> datetime:
"""Return the current time in UTC"""
return datetime.now(timezone.utc)
def beautify_query(query: str) -> str:
    """Return a minimally beautified version of the given query string:
    the first letter is uppercased, and a question mark is appended
    unless the string already ends with terminating punctuation."""
    if not query:
        return ""
    # Uppercase the initial letter
    bq = query[0].upper() + query[1:]
    # Add a question mark if no other ending punctuation is present
    return bq if bq.endswith(("?", ".", "!")) else bq + "?"
class QueryGrammar(BIN_Grammar):
    """A subclass of BIN_Grammar that reads its input from
    strings obtained from query handler plug-ins in the
    queries subdirectory, prefixed by a preamble"""

    def __init__(self) -> None:
        super().__init__()
        # Enable the 'include_queries' condition
        self.set_conditions({"include_queries"})

    @classmethod
    def is_grammar_modified(cls) -> bool:
        """Override inherited function to specify that query grammars
        should always be reparsed, since the set of plug-in query
        handlers may have changed, as well as their grammar fragments."""
        return True

    def read(
        self, fname: str, verbose: bool = False, binary_fname: Optional[str] = None
    ) -> None:
        """Overrides the inherited read() function to supply grammar
        text from a file as well as additional grammar fragments
        from query processor modules."""

        def gen_grammar_lines() -> Iterator[str]:
            """Yield the grammar file line-by-line, followed by the
            query grammar preamble, and finally the grammar additions
            coalesced from fragments in query processor modules."""
            # The base grammar file
            with open(fname, "r", encoding="utf-8") as inp:
                yield from inp
            # The query grammar preamble
            yield from _QUERY_ROOT_GRAMMAR.split("\n")
            # Grammar additions from plug-ins, if any
            yield from QueryParser.grammar_additions().split("\n")

        try:
            # Note that if Settings.DEBUG is True, we always write a fresh
            # binary grammar file, regardless of file timestamps. This helps
            # in query development, as query grammar fragment strings may change
            # without any .grammar source file change (which is the default
            # trigger for generating new binary grammar files).
            self.read_from_generator(
                fname,
                gen_grammar_lines(),
                verbose,
                binary_fname,
                force_new_binary=Settings.DEBUG,
            )
        except (IOError, OSError):
            raise GrammarError("Unable to open or read grammar file", fname, 0)
class QueryParser(Fast_Parser):
    """A subclass of Fast_Parser, specialized to parse queries"""

    # Override the punctuation that is understood by the parser,
    # adding the plus sign ('+') and the forward slash ('/')
    _UNDERSTOOD_PUNCTUATION = BIN_Token._UNDERSTOOD_PUNCTUATION + "+/"
    # Name of the pre-compiled binary grammar file for queries
    _GRAMMAR_BINARY_FILE = Fast_Parser._GRAMMAR_FILE + ".query.bin"
    # Keep a separate grammar class instance and time stamp for
    # QueryParser. This Python sleight-of-hand overrides
    # class attributes that are defined in BIN_Parser, see binparser.py.
    _grammar_ts: Optional[float] = None
    _grammar = None
    _grammar_class = QueryGrammar
    # Also keep separate class instances of the C grammar and its timestamp
    _c_grammar: Any = cast(Any, ffi).NULL
    _c_grammar_ts: Optional[float] = None
    # Store the grammar additions for queries
    # (these remain constant for all query parsers, so there is no
    # need to store them per-instance)
    _grammar_additions = ""

    def __init__(self, grammar_additions: str) -> None:
        """Initialize the parser, storing the given grammar additions on the
        class before the base class initializer triggers the grammar read."""
        QueryParser._grammar_additions = grammar_additions
        super().__init__(verbose=False, root=QUERY_GRAMMAR_ROOT)

    @classmethod
    def grammar_additions(cls) -> str:
        """Return the grammar additions contributed by query modules"""
        return cls._grammar_additions
class QueryTree(Tree):
    """Extend the tree.Tree class to collect all child families of the
    Query nonterminal from a query parse forest"""

    def __init__(self):
        super().__init__()
        # Child nodes of the Query nonterminal, one per valid query parse
        self._query_trees: List[Node] = []

    def handle_O(self, n: int, s: str) -> None:
        """Handle the O (option) tree record"""
        assert n == 1

    def handle_Q(self, n: int) -> None:
        """Handle the Q (final) tree record"""
        super().handle_Q(n)
        # Access the QueryRoot node
        root = self.s[1]
        # Access the Query node, i.e. the child of QueryRoot
        query = None if root is None else root.child
        # The child nodes of the Query node are the valid query parse trees
        if query is None:
            self._query_trees = []
        else:
            self._query_trees = list(query.children())

    @property
    def query_trees(self) -> List[Node]:
        """Returns the list of valid query parse trees, i.e. child nodes of Query"""
        return self._query_trees

    @property
    def query_nonterminals(self) -> Set[str]:
        """Return the set of query nonterminals that match this query"""
        return {node.string_self() for node in self._query_trees}

    def process_queries(
        self, query: "Query", session: Session, processor: ProcEnv
    ) -> bool:
        """Process all query trees that the given processor is interested in"""
        wanted: Set[str] = processor.get("QUERY_NONTERMINALS", set())
        # Every tree processor must be interested in at least one query type
        assert isinstance(wanted, set)
        # For development, we allow processors to be disinterested in any query
        # assert len(wanted) > 0
        if self.query_nonterminals.isdisjoint(wanted):
            # This processor is not interested in any of the nonterminals
            # in this query's parse forest: don't waste more cycles on it
            return False
        with self.context(session, processor, query=query) as state:
            for qt in self._query_trees:
                # Skip query trees whose root nonterminal
                # the processor is not interested in
                if qt.string_self() not in wanted:
                    continue
                # Hand the query tree over to the processor
                self.process_sentence(state, qt)
                if query.has_answer():
                    # The processor successfully answered the query: We're done
                    return True
        return False
class Query:
    """A Query is initialized by parsing a query string using QueryRoot as the
    grammar root nonterminal. The Query can then be executed by processing
    the best parse tree using the nonterminal handlers given above, returning a
    result object if successful."""

    # Processors that handle parse trees
    _tree_processors: List[ProcEnv] = []
    # Functions from utility modules,
    # facilitating code reuse between query modules
    _utility_functions: ChainMapType[str, FunctionType] = ChainMap()
    # Handler functions within processors that handle plain text
    _text_processors: List[Callable[["Query"], bool]] = []
    # Handler of last resort for queries that no other processor handles
    _last_resort_processor: Optional[Callable[["Query"], bool]] = None
    # Singleton instance of the query parser
    _parser: Optional[QueryParser] = None
    # Help text functions associated with topic lemmas
    _help_texts: Dict[str, List[HelpFunc]] = dict()
    def __init__(
        self,
        session: Session,  # SQLAlchemy session
        query: str,
        voice: bool,
        auto_uppercase: bool,
        location: Optional[LatLonTuple],
        client_id: Optional[str],
        client_type: Optional[str],
        client_version: Optional[str],
        authenticated: bool = False,
        private: bool = False,
    ) -> None:
        """Initialize a Query instance from a raw query string
        and the associated client metadata."""
        self._query = q = self._preprocess_query_string(query)
        self._session = session
        self._location = location
        # Prepare a "beautified query" string that can be
        # shown in a client user interface. By default, this
        # starts with an uppercase letter and ends with a
        # question mark, but this can be modified during the
        # processing of the query.
        self.set_beautified_query(beautify_query(q))
        # Boolean flag for whether this is a voice query
        self._voice = voice
        # Voice synthesizer ID, if any
        self._voice_id: Optional[str] = None
        # Voice synthesizer locale
        self._voice_locale: str = "is_IS"
        self._auto_uppercase = auto_uppercase
        self._error: Optional[str] = None
        # A detailed answer, which can be a list or a dict
        self._response: Optional[ResponseType] = None
        # A single "best" displayable text answer
        self._answer: Optional[str] = None
        # A version of self._answer that can be
        # fed to a voice synthesizer
        self._voice_answer: Optional[str] = None
        # The parse tree of the query, set by self.parse() on success
        self._tree: Optional[QueryTree] = None
        # Query type, set by the processor that answers the query
        self._qtype: Optional[str] = None
        # Query key, e.g. a person name in nominative case
        self._key: Optional[str] = None
        # Token list from the most recent successful parse
        self._toklist: Optional[List[Tok]] = None
        # Expiration timestamp, if any
        self._expires: Optional[datetime] = None
        # URL associated with query, can be set by query response handler
        # and subsequently provided to the remote client
        self._url: Optional[str] = None
        # Command returned by query
        self._command: Optional[str] = None
        # Image URL returned by query
        self._image: Optional[str] = None
        # Client id, if known
        self._client_id = client_id
        # Client type, if known
        self._client_type = client_type
        # Client version, if known
        self._client_version = client_version
        # Boolean flag indicating whether the client is authenticated
        self._authenticated = authenticated
        # Boolean flag indicating whether the query is private
        self._private = private
        # Source of answer to query
        self._source: Optional[str] = None
        # Query context, which is None until fetched via self.fetch_context()
        # This should be a dict that can be represented in JSON
        self._context: Optional[ContextDict] = None
def _preprocess_query_string(self, q: str) -> str:
"""Preprocess the query string prior to further analysis"""
# Note: Whitespace, periods, question marks, and exclamation marks
# have already been stripped off the end of the query string
if not q:
return q
# Strip prefixes such as Embla's name, "Hæ Embla", etc.
qf = re.sub(_IGNORED_PREFIX_RE, "", q)
# Fix common Google ASR mistake: 'hæ embla' is returned as 'bæjarblað'
if not qf and q == "bæjarblað":
q = "hæ embla"
# Remove any trailing punctuation
qf = re.sub(r"[\.\?\!]+$", "", qf)
# If stripping the prefixes results in an empty query,
# just return original query string, stripped but otherwise unmodified
return qf or q
@classmethod
def init_class(cls) -> None:
"""Initialize singleton data, i.e. the list of query
processor modules and the query parser instance"""
all_procs: List[ModuleType] = []
tree_procs: List[Tuple[int, ModuleType]] = []
text_procs: List[Tuple[int, Callable[["Query"], bool]]] = []
last_resort_proc: Optional[Callable[["Query"], bool]] = None
# Load the query processor modules found in the
# queries directory. The modules can be tree and/or text processors,
# and we sort them into two lists, accordingly.
modnames = modules_in_dir(QUERIES_DIR)
for modname in sorted(modnames):
try:
m = importlib.import_module(modname)
is_proc = False
# Obtain module priority, if any
# It can be a number or the string "LAST_RESORT"
priority: Union[int, Literal["LAST_RESORT"]] = getattr(m, "PRIORITY", 0)
if priority == "LAST_RESORT":
# This is a last-resort query processor
# (i.e. it is invoked if no other processor
# is able to handle the query)
if last_resort_proc is not None:
logging.error(
f"Module {modname} has PRIORITY set to 'LAST_RESORT', "
"but another module already has this priority"
)
continue
last_resort_proc = getattr(m, "handle_plain_text", None)
if last_resort_proc is None:
logging.error(
f"Module {modname} has PRIORITY set to 'LAST_RESORT', "
"but does not define handle_plain_text()"
)
continue
if getattr(m, "HANDLE_TREE", False):
# This is a tree processor
is_proc = True
tree_procs.append((priority, m))
handle_plain_text = getattr(m, "handle_plain_text", None)
if handle_plain_text is not None:
# This is a text processor:
# store a reference to its handler function
is_proc = True
text_procs.append((priority, handle_plain_text))
if is_proc:
all_procs.append(m)
except ImportError as e:
logging.error(f"Error importing query processor module {modname}: {repr(e)}")
except Exception as e:
logging.error(f"Error initializing query processor module {modname}: {repr(e)}")
# Sort the processors by descending priority
# so that the higher-priority ones get invoked bfore the lower-priority ones
# We create a ChainMap (processing environment) for each tree processor,
# containing the processors attributes with the utility modules as a fallback
cls._tree_processors = [
cls.create_processing_env(t[1])
for t in sorted(tree_procs, key=lambda x: -x[0])
]
cls._text_processors = [t[1] for t in sorted(text_procs, key=lambda x: -x[0])]
cls._last_resort_processor = last_resort_proc
if Settings.DEBUG:
# Print the active processors in descending priority order
print("Text processors:")
print(
"\n".join(
f"{p[0]:4} -> {p[1].__module__}.{p[1].__qualname__}"
for p in sorted(text_procs, key=lambda x: -x[0])
)
)
print("Tree processors:")
print(
"\n".join(
f"{p[0]:4} -> {p[1].__name__}"
for p in sorted(tree_procs, key=lambda x: -x[0])
)
)
if last_resort_proc is not None:
print("Last resort processor:")
p = last_resort_proc
print(f" {p.__module__}.{p.__qualname__}")
# Obtain query grammar fragments from the utility modules and tree processors
grammar_fragments: List[str] = []
# Load utility modules
modnames = modules_in_dir(QUERIES_UTIL_DIR)
for modname in sorted(modnames):
try:
um = importlib.import_module(modname)
exported = vars(um) # Get all exported values from module
# Pop grammar fragment, if any
fragment = exported.pop("GRAMMAR", None)
if fragment and isinstance(fragment, str):
# This utility module has a grammar fragment,
# and probably corresponding nonterminal functions
# We add the grammar fragment to our grammar
grammar_fragments.append(fragment)
# and the nonterminal functions to the shared functions ChainMap,
# ignoring non-callables and underscore (private) attributes
cls._utility_functions.update(
(
(k, v)
for k, v in exported.items()
if callable(v) and not k.startswith("_")
)
)
except ImportError as e:
logging.error(f"Error importing utility module {modname}: {repr(e)}")
except Exception as e:
logging.error(f"Error initializing utility module {modname}: {repr(e)}")
for processor in cls._tree_processors:
# Check whether this tree processor supplies a query grammar fragment
fragment = processor.pop("GRAMMAR", None)
if fragment and isinstance(fragment, str):
# Looks legit: add it to our list
grammar_fragments.append(fragment)
# Collect topic lemmas that can be used to provide
# context-sensitive help texts when queries cannot be parsed
help_texts: DefaultDict[str, List[HelpFunc]] = defaultdict(list)
for processor in all_procs:
# Collect topic lemmas and corresponding help text functions
topic_lemmas = getattr(processor, "TOPIC_LEMMAS", None)
if topic_lemmas:
help_text_func = getattr(processor, "help_text", None)
# If topic lemmas are given, a help_text function
# should also be present
assert help_text_func is not None
if help_text_func is not None:
for lemma in topic_lemmas:
help_texts[lemma].append(help_text_func)
cls._help_texts = help_texts
# Coalesce the grammar additions from the fragments
grammar_additions = "\n".join(grammar_fragments)
# Initialize a singleton parser instance for queries,
# with the nonterminal 'QueryRoot' as the grammar root
cls._parser = QueryParser(grammar_additions)
@staticmethod
def create_processing_env(processor: ModuleType) -> ProcEnv:
"""
Create a new child of the utility functions ChainMap.
Returns a mapping suitable for parsing query trees,
where the current processor's functions are prioritized over
the shared utility module functions.
"""
return Query._utility_functions.new_child(vars(processor))
    @staticmethod
    def _parse(toklist: Iterable[Tok]) -> Tuple[ResponseDict, Dict[int, str]]:
        """Parse a token list as a query. Returns a stats dict with the
        number of sentences seen and parsed, and a map from (1-based)
        sentence index to a text dump of its parse forest. Raises
        ParseError if a second sentence begins."""
        bp = Query._parser
        assert bp is not None
        num_sent = 0
        num_parsed_sent = 0
        rdc = Reducer(bp.grammar)
        trees: Dict[int, str] = dict()
        sent: List[Tok] = []
        for t in toklist:
            if t[0] == TOK.S_BEGIN:
                if num_sent > 0:
                    # A second sentence is beginning: this is not valid for a query
                    raise ParseError("A query cannot contain more than one sentence")
                sent = []
            elif t[0] == TOK.S_END:
                slen = len(sent)
                if not slen:
                    # Ignore empty sentences
                    continue
                num_sent += 1
                # Parse the accumulated sentence
                num = 0
                try:
                    # Parse the sentence
                    forest = bp.go(sent)
                    num = Fast_Parser.num_combinations(forest)
                    if num > 1:
                        # Reduce the resulting forest
                        forest = rdc.go(forest)
                except ParseError:
                    # Parse failure: leave num at 0 so no tree is recorded
                    forest = None
                    num = 0
                if num > 0:
                    num_parsed_sent += 1
                    # Obtain a text representation of the parse tree
                    assert forest is not None
                    trees[num_sent] = ParseForestDumper.dump_forest(forest)
            elif t[0] == TOK.P_BEGIN:
                pass  # Paragraph begin: nothing to do
            elif t[0] == TOK.P_END:
                pass  # Paragraph end: nothing to do
            else:
                # Accumulate the token into the current sentence
                sent.append(t)
        result: ResponseDict = dict(num_sent=num_sent, num_parsed_sent=num_parsed_sent)
        return result, trees
@staticmethod
def _query_string_from_toklist(toklist: Iterable[Tok]) -> str:
"""Re-create a query string from an auto-capitalized token list"""
actual_q = detokenize(toklist, normalize=True)
if actual_q:
# Fix stuff that the auto-capitalization tends to get wrong,
# such as 'í Dag'
for wrong, correct in _CAPITALIZATION_REPLACEMENTS:
actual_q = actual_q.replace(wrong, correct)
# Capitalize the first letter of the query
actual_q = actual_q[0].upper() + actual_q[1:]
# Terminate the query with a question mark,
# if not otherwise terminated
if not actual_q.endswith(("?", ".", "!")):
actual_q += "?"
return actual_q
def parse(self, result: ResponseDict) -> bool:
"""Parse the query from its string, returning True if valid"""
self._tree = None # Erase previous tree, if any
self._error = None # Erase previous error, if any
self._qtype = None # Erase previous query type, if any
self._key = None
self._toklist = None
q = self._query
if not q:
self.set_error("E_EMPTY_QUERY")
return False
# Tokenize and auto-capitalize the query string, without multiplying numbers together
toklist = list(
tokenize(
q,
auto_uppercase=self._auto_uppercase and q.islower(),
no_multiply_numbers=True,
)
)
actual_q = self._query_string_from_toklist(toklist)
# Update the beautified query string, as the actual_q string
# probably has more correct capitalization
self.set_beautified_query(actual_q)
# TODO: We might want to re-tokenize the actual_q string with
# auto_uppercase=False, since we may have fixed capitalization
# errors in _query_string_from_toklist()
if Settings.DEBUG:
# Log the query string as seen by the parser
print(f"Query is: '{actual_q}'")
try:
parse_result, trees = Query._parse(toklist)
except ParseError:
self.set_error("E_PARSE_ERROR")
return False
if not trees:
# No parse at all
self.set_error("E_NO_PARSE_TREES")
return False
if parse_result["num_sent"] != 1:
# Queries must be one sentence
self.set_error("E_MULTIPLE_SENTENCES")
return False
if parse_result["num_parsed_sent"] != 1:
# Unable to parse the single sentence
self.set_error("E_NO_PARSE")
return False
if 1 not in trees:
# No sentence number 1
self.set_error("E_NO_FIRST_SENTENCE")
return False
# Looks good
# Store the resulting parsed query as a tree
tree_string = "S1\n" + trees[1]
# if Settings.DEBUG:
# print(tree_string)
self._tree = QueryTree()
self._tree.load(tree_string)
# Store the token list
self._toklist = toklist
return True
def execute_from_plain_text(self) -> bool:
"""Attempt to execute a plain text query, without having to parse it"""
if not self._query:
return False
# Call the handle_plain_text() function in each text processor,
# until we find one that returns True, or return False otherwise
return any(
handle_plain_text(self) for handle_plain_text in self._text_processors
)
def execute_from_tree(self) -> bool:
"""Execute the query or queries contained in the previously parsed tree;
return True if successful"""
if self._tree is None:
self.set_error("E_QUERY_NOT_PARSED")
return False
# Try each tree processor in turn, in priority order (highest priority first)
for processor in self._tree_processors:
self._error = None
self._qtype = None
# Process the tree, which has only one sentence, but may
# have multiple matching query nonterminals
# (children of Query in the grammar)
try:
# Note that passing query=self here means that the
# "query" field of the TreeStateDict is populated,
# turning it into a QueryStateDict.
if self._tree.process_queries(
self,
self._session,
processor,
):
# This processor found an answer, which is already stored
# in the Query object: return True
return True
except Exception as e:
logging.error(
f"Exception in execute_from_tree('{processor.get('__name__', 'UNKNOWN')}') "
f"for query '{self._query}': {repr(e)}"
)
# No processor was able to answer the query
return False
def has_answer(self) -> bool:
"""Return True if the query currently has an answer"""
return bool(self._answer) and self._error is None
def last_answer(self, *, within_minutes: int = 5) -> Optional[Tuple[str, str]]:
"""Return the last answer given to this client, by default
within the last 5 minutes (0=forever)"""
if not self._client_id:
# Can't find the last answer if no client_id given
return None
# Find the newest non-error, no-repeat query result for this client
q = (
self._session.query(QueryRow.answer, QueryRow.voice)
.filter(QueryRow.client_id == self._client_id)
.filter(QueryRow.qtype != "Repeat")
.filter(QueryRow.error == None)
)
if within_minutes > 0:
# Apply a timestamp filter
since = _now() - timedelta(minutes=within_minutes)
q = q.filter(QueryRow.timestamp >= since)
# Sort to get the newest query that fulfills the criteria
last = q.order_by(desc(QueryRow.timestamp)).limit(1).one_or_none()
return None if last is None else (last[0], last[1])
def fetch_context(self, *, within_minutes: int = 10) -> Optional[ContextDict]:
"""Return the context from the last answer given to this client,
by default within the last 10 minutes (0=forever)"""
if not self._client_id:
# Can't find the last answer if no client_id given
return None
# Find the newest non-error, no-repeat query result for this client
q = (
self._session.query(QueryRow.context)
.filter(QueryRow.client_id == self._client_id)
.filter(QueryRow.qtype != "Repeat")
.filter(QueryRow.error == None)
)
if within_minutes > 0:
# Apply a timestamp filter
since = _now() - timedelta(minutes=within_minutes)
q = q.filter(QueryRow.timestamp >= since)
# Sort to get the newest query that fulfills the criteria
ctx = cast(
Optional[Sequence[ContextDict]],
q.order_by(desc(QueryRow.timestamp)).limit(1).one_or_none(),
)
# This function normally returns a dict that has been decoded from JSON
return None if ctx is None else ctx[0]
def count_queries_of_type(self, qtype: str) -> int:
"""Return the number of queries by this client of the given type"""
if not self._client_id:
# Can't find the last answer if no client_id given
return 0
# Count the non-error query results for this client and query type
return (
self._session.query(QueryRow.id)
.filter(QueryRow.client_id == self._client_id)
.filter(QueryRow.qtype == qtype)
.filter(QueryRow.error == None)
.count()
)
    @property
    def query(self) -> str:
        """The query text, in its original form"""
        return self._query

    @property
    def query_lower(self) -> str:
        """The query text, all lower case"""
        return self._query.lower()

    @property
    def beautified_query(self) -> str:
        """Return the query string that will be reflected back to the client"""
        return self._beautified_query

    def set_beautified_query(self, q: str) -> None:
        """Set the query string that will be reflected back to the client"""
        # Fix capitalization of known proper names
        self._beautified_query = (
            q.replace("embla", "Embla")
            .replace("miðeind", "Miðeind")
            .replace("Guðni Th ", "Guðni Th. ")  # By presidential request :)
        )

    def lowercase_beautified_query(self) -> None:
        """If we know that no uppercase words occur in the query,
        except the initial capital, this function can be called
        to adjust the beautified query string accordingly."""
        self.set_beautified_query(self._beautified_query.capitalize())

    def query_is_command(self) -> None:
        """Called from a query processor if the query is a command, not a question"""
        # Put a period at the end of the beautified query text
        # instead of a question mark
        if self._beautified_query.endswith("?"):
            self._beautified_query = self._beautified_query[:-1] + "."
    @property
    def expires(self) -> Optional[datetime]:
        """Expiration time stamp for this query answer, if any"""
        return self._expires

    def set_expires(self, ts: datetime) -> None:
        """Set an expiration time stamp for this query answer"""
        self._expires = ts

    @property
    def url(self) -> Optional[str]:
        """URL answer associated with this query"""
        return self._url

    def set_url(self, u: Optional[str]) -> None:
        """Set the URL answer associated with this query"""
        self._url = u

    @property
    def command(self) -> Optional[str]:
        """JavaScript command associated with this query"""
        return self._command

    def set_command(self, c: str) -> None:
        """Set the JavaScript command associated with this query"""
        self._command = c

    @property
    def image(self) -> Optional[str]:
        """Image URL associated with this query"""
        return self._image

    def set_image(self, url: str) -> None:
        """Set the image URL associated with this query"""
        self._image = url

    @property
    def source(self) -> Optional[str]:
        """Return the source of the answer to this query"""
        return self._source

    def set_source(self, s: str) -> None:
        """Set the source for the answer to this query"""
        self._source = s

    @property
    def location(self) -> Optional[LatLonTuple]:
        """The client location, if known, as a (lat, lon) tuple"""
        return self._location

    @property
    def token_list(self) -> Optional[List[Tok]]:
        """The original token list for the query"""
        return self._toklist

    def qtype(self) -> Optional[str]:
        """Return the query type"""
        return self._qtype

    def set_qtype(self, qtype: str) -> None:
        """Set the query type ('Person', 'Title', 'Company', 'Entity'...)"""
        self._qtype = qtype
    def set_answer(
        self, response: ResponseType, answer: str, voice_answer: Optional[str] = None
    ) -> None:
        """Set the answer to the query"""
        # Detailed response (this is usually a dict)
        self._response = response
        # Single best answer, as a displayable string
        self._answer = answer
        # A voice version of the single best answer
        self._voice_answer = voice_answer

    def set_key(self, key: str) -> None:
        """Set the query key, i.e. the term or string used to execute the query"""
        # This is for instance a person name in nominative case
        self._key = key

    def set_error(self, error: str) -> None:
        """Set an error result"""
        self._error = error

    def set_voice_id(self, voice_id: str) -> None:
        """Set the voice synthesizer ID"""
        self._voice_id = voice_id

    def set_voice_locale(self, voice_locale: str) -> None:
        """Set voice locale (e.g. 'is_IS', 'en_US', etc.)"""
        self._voice_locale = voice_locale
    @property
    def is_voice(self) -> bool:
        """Return True if this is a voice query"""
        return self._voice

    @property
    def client_id(self) -> Optional[str]:
        """Return the client identifier, if known"""
        return self._client_id

    @property
    def client_type(self) -> Optional[str]:
        """Return client type string, e.g. "ios", "android", "www", etc."""
        return self._client_type

    @property
    def client_version(self) -> Optional[str]:
        """Return client version string, e.g. "1.0.3" """
        return self._client_version

    @property
    def authenticated(self) -> bool:
        """Return True if the query is authenticated, i.e.
        contains a bearer token from the client"""
        return self._authenticated

    @property
    def private(self) -> bool:
        """Return True if the query is private"""
        return self._private

    def response(self) -> Optional[ResponseType]:
        """Return the detailed query answer"""
        return self._response

    def answer(self) -> Optional[str]:
        """Return the 'single best' displayable query answer"""
        return self._answer

    def voice_answer(self) -> str:
        """Return a voice version of the 'single best' answer, if any"""
        return self._voice_answer or ""

    def key(self) -> Optional[str]:
        """Return the query key"""
        return self._key

    def error(self) -> Optional[str]:
        """Return the query error, if any"""
        return self._error

    @property
    def context(self) -> Optional[ContextDict]:
        """Return the context that has been set by self.set_context()"""
        return self._context

    def set_context(self, ctx: ContextDict) -> None:
        """Set a query context that will be stored and made available
        to the next query from the same client"""
        self._context = ctx
def client_data(self, key: str) -> Optional[ClientDataDict]:
"""Fetch client_id-associated data stored in the querydata table"""
if not self.client_id:
return None