
Commit ff2c3ca

chore: remove print
1 parent 981b039 commit ff2c3ca

3 files changed: +0 -16 lines changed

semantic_router/encoders/tfidf.py

Lines changed: 0 additions & 2 deletions

```diff
@@ -53,13 +53,11 @@ def _fit_validate(self, routes: List[Route]):
             raise TypeError("`routes` parameter must be a list of Route objects.")
 
     def _build_word_index(self, docs: List[str]) -> Dict:
-        print(docs)
         words = set()
         for doc in docs:
             for word in doc.split():
                 words.add(word)
         word_index = {word: i for i, word in enumerate(words)}
-        print(word_index)
         return word_index
 
     def _compute_tf(self, docs: List[str]) -> np.ndarray:
```
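For reference, the `_compute_tf` signature in the trailing context suggests the word index assigns each vocabulary word a column position in a term-frequency matrix. Below is a minimal sketch of that relationship, assuming whitespace tokenisation and raw counts; it is illustrative only, not the library's actual implementation.

```python
import numpy as np
from typing import Dict, List


def build_word_index(docs: List[str]) -> Dict[str, int]:
    # collect every whitespace-separated token and give it a column index
    words = set()
    for doc in docs:
        for word in doc.split():
            words.add(word)
    return {word: i for i, word in enumerate(words)}


def compute_tf(docs: List[str], word_index: Dict[str, int]) -> np.ndarray:
    # one row per document, one column per known word, cell = raw count
    tf = np.zeros((len(docs), len(word_index)))
    for row, doc in enumerate(docs):
        for word in doc.split():
            if word in word_index:
                tf[row, word_index[word]] += 1
    return tf
```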

semantic_router/routers/hybrid.py

Lines changed: 0 additions & 12 deletions

```diff
@@ -37,13 +37,10 @@ def __init__(
         auto_sync: Optional[str] = None,
         alpha: float = 0.3,
     ):
-        print("...2.1")
         if index is None:
             logger.warning("No index provided. Using default HybridLocalIndex.")
             index = HybridLocalIndex()
-        print("...2.2")
         encoder = self._get_encoder(encoder=encoder)
-        print("...2.3")
         super().__init__(
             encoder=encoder,
             llm=llm,
@@ -53,22 +50,17 @@ def __init__(
             aggregation=aggregation,
             auto_sync=auto_sync,
         )
-        print("...0")
         # initialize sparse encoder
         self.sparse_encoder = self._get_sparse_encoder(sparse_encoder=sparse_encoder)
-        print("...5")
         # set alpha
         self.alpha = alpha
-        print("...6")
         # fit sparse encoder if needed
         if (
             isinstance(self.sparse_encoder, TfidfEncoder)
             and hasattr(self.sparse_encoder, "fit")
             and self.routes
         ):
-            print("...3")
             self.sparse_encoder.fit(self.routes)
-            print("...4")
         # run initialize index now if auto sync is active
         if self.auto_sync:
             self._init_index_state()
@@ -94,7 +86,6 @@ def add(self, routes: List[Route] | Route):
         # TODO: to merge, self._encode should probably output a special
         # TODO Embedding type that can be either dense or hybrid
         dense_emb, sparse_emb = self._encode(all_utterances)
-        print(f"{sparse_emb=}")
         self.index.add(
             embeddings=dense_emb.tolist(),
             routes=route_names,
@@ -180,8 +171,6 @@ def _encode(self, text: list[str]) -> tuple[np.ndarray, list[SparseEmbedding]]:
         xq_s = self.sparse_encoder(text)
         # xq_s = np.squeeze(xq_s)
         # convex scaling
-        print(f"{self.sparse_encoder.__class__.__name__=}")
-        print(f"_encode: {xq_d.shape=}, {xq_s=}")
         xq_d, xq_s = self._convex_scaling(dense=xq_d, sparse=xq_s)
         return xq_d, xq_s
 
@@ -202,7 +191,6 @@ async def _async_encode(
         # create dense query vector
         xq_d = np.array(dense_vec)
         # convex scaling
-        print(f"_async_encode: {xq_d.shape=}, {xq_s=}")
         xq_d, xq_s = self._convex_scaling(dense=xq_d, sparse=xq_s)
         return xq_d, xq_s
 
```
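`_convex_scaling` itself is not part of this diff, but given the `alpha: float = 0.3` parameter and the `# convex scaling` comments, a plausible sketch of the idea is to weight the dense query vector by `alpha` and the sparse values by `1 - alpha` before querying the hybrid index. The helper below is an assumption for illustration, not the library's code; in particular it treats sparse embeddings as plain `{index: value}` dicts rather than the `SparseEmbedding` type referenced above.

```python
import numpy as np
from typing import Dict, List, Tuple


def convex_scaling(
    dense: np.ndarray, sparse: List[Dict[int, float]], alpha: float = 0.3
) -> Tuple[np.ndarray, List[Dict[int, float]]]:
    # dense similarity contributes alpha of the score, sparse the remaining 1 - alpha
    scaled_dense = dense * alpha
    scaled_sparse = [
        {idx: value * (1 - alpha) for idx, value in emb.items()} for emb in sparse
    ]
    return scaled_dense, scaled_sparse
```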

tests/unit/test_hybrid_layer.py

Lines changed: 0 additions & 2 deletions

```diff
@@ -146,14 +146,12 @@ def test_add_multiple_routes(self, openai_encoder, routes):
         assert len(route_layer.routes) == 2, "route_layer.routes is not 2"
 
     def test_query_and_classification(self, openai_encoder, routes):
-        print("...1")
         route_layer = HybridRouter(
             encoder=openai_encoder,
             sparse_encoder=sparse_encoder,
             routes=routes,
             auto_sync="local",
         )
-        print("...2")
         route_layer.set_threshold(0.0)
         query_result = route_layer(UTTERANCES[0])
         assert query_result.name in ["Route 1", "Route 2"]
```
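If trace output like the removed `print` calls is ever needed again, a standard alternative is module-level logging at DEBUG level, which stays silent unless explicitly enabled. This is a general Python pattern, not something this commit introduces:

```python
import logging

logger = logging.getLogger("semantic_router.encoders.tfidf")


def build_word_index(docs):
    # replaces print(docs): emits a DEBUG record that is silent by default
    logger.debug("building word index over %d docs", len(docs))
    words = {word for doc in docs for word in doc.split()}
    return {word: i for i, word in enumerate(words)}


if __name__ == "__main__":
    # opt in to debug output only when tracing is actually wanted
    logging.basicConfig(level=logging.DEBUG)
    build_word_index(["hello world", "hello routing"])
```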
