@@ -37,13 +37,10 @@ def __init__(
         auto_sync: Optional[str] = None,
         alpha: float = 0.3,
     ):
-        print("...2.1")
         if index is None:
             logger.warning("No index provided. Using default HybridLocalIndex.")
             index = HybridLocalIndex()
-        print("...2.2")
         encoder = self._get_encoder(encoder=encoder)
-        print("...2.3")
         super().__init__(
             encoder=encoder,
             llm=llm,
@@ -53,22 +50,17 @@ def __init__(
             aggregation=aggregation,
             auto_sync=auto_sync,
         )
-        print("...0")
         # initialize sparse encoder
         self.sparse_encoder = self._get_sparse_encoder(sparse_encoder=sparse_encoder)
-        print("...5")
         # set alpha
         self.alpha = alpha
-        print("...6")
         # fit sparse encoder if needed
         if (
             isinstance(self.sparse_encoder, TfidfEncoder)
             and hasattr(self.sparse_encoder, "fit")
             and self.routes
         ):
-            print("...3")
             self.sparse_encoder.fit(self.routes)
-            print("...4")
         # run initialize index now if auto sync is active
         if self.auto_sync:
             self._init_index_state()
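
The constructor above falls back to a default HybridLocalIndex when no index is passed, fits a TfidfEncoder sparse encoder on the configured routes, and stores alpha for blending dense and sparse scores. A minimal usage sketch, assuming this class is exposed as HybridRouter and that Route, OpenAIEncoder and TfidfEncoder import from the paths shown; the import paths and route contents are assumptions, not taken from this diff:

    # minimal sketch; import paths and route data are assumptions, not from this PR
    from semantic_router import Route
    from semantic_router.encoders import OpenAIEncoder, TfidfEncoder
    from semantic_router.routers import HybridRouter

    routes = [
        Route(name="chitchat", utterances=["how are you?", "lovely weather today"]),
        Route(name="politics", utterances=["who should I vote for?"]),
    ]

    router = HybridRouter(
        encoder=OpenAIEncoder(),        # dense encoder
        sparse_encoder=TfidfEncoder(),  # fit on the routes inside __init__
        routes=routes,
        alpha=0.3,                      # dense weight used by _convex_scaling
        auto_sync="local",              # triggers _init_index_state() as shown above
    )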
@@ -94,7 +86,6 @@ def add(self, routes: List[Route] | Route):
         # TODO: to merge, self._encode should probably output a special
         # TODO Embedding type that can be either dense or hybrid
         dense_emb, sparse_emb = self._encode(all_utterances)
-        print(f"{sparse_emb=}")
         self.index.add(
             embeddings=dense_emb.tolist(),
             routes=route_names,
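
The add method shown above re-encodes every utterance of the incoming routes into one dense and one sparse embedding before writing them to the index, so routes can also be appended after construction. A short, hypothetical continuation of the earlier sketch:

    # `router` is the hypothetical instance from the previous sketch;
    # the route and its utterances are made up for illustration
    weather = Route(name="weather", utterances=["will it rain tomorrow?", "is it sunny out?"])
    router.add(weather)  # encodes utterances (dense + sparse) and pushes them to the index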
@@ -180,8 +171,6 @@ def _encode(self, text: list[str]) -> tuple[np.ndarray, list[SparseEmbedding]]:
         xq_s = self.sparse_encoder(text)
         # xq_s = np.squeeze(xq_s)
         # convex scaling
-        print(f"{self.sparse_encoder.__class__.__name__=}")
-        print(f"_encode: {xq_d.shape=}, {xq_s=}")
         xq_d, xq_s = self._convex_scaling(dense=xq_d, sparse=xq_s)
         return xq_d, xq_s
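
Both _encode and the _async_encode hunk below funnel their results through _convex_scaling, which blends the dense and sparse query vectors using the alpha set in the constructor. A rough sketch of that idea, with plain NumPy arrays standing in for the library's SparseEmbedding type; the actual method may differ:

    # rough sketch of convex scaling; assumes alpha weights the dense side and
    # (1 - alpha) the sparse side, with plain arrays instead of SparseEmbedding
    import numpy as np

    def convex_scale(dense: np.ndarray, sparse_values: list[np.ndarray], alpha: float = 0.3):
        scaled_dense = dense * alpha
        scaled_sparse = [values * (1 - alpha) for values in sparse_values]
        return scaled_dense, scaled_sparse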
@@ -202,7 +191,6 @@ async def _async_encode(
         # create dense query vector
         xq_d = np.array(dense_vec)
         # convex scaling
-        print(f"_async_encode: {xq_d.shape=}, {xq_s=}")
         xq_d, xq_s = self._convex_scaling(dense=xq_d, sparse=xq_s)
         return xq_d, xq_s