
Merge pull request #980 from guardrails-ai/fix-lc-w-noop
Fix Langchain Integration when using No-Op
dtam authored Jul 31, 2024
2 parents 6a44db5 + 4ee0e88 commit 70fa5ac
Showing 2 changed files with 4 additions and 3 deletions.
5 changes: 3 additions & 2 deletions guardrails/integrations/langchain/guard_runnable.py
@@ -2,6 +2,7 @@
 from guardrails.guard import Guard
 from guardrails.errors import ValidationError
 from guardrails.classes.output_type import OT
+from guardrails.classes.validation_outcome import ValidationOutcome


 class GuardRunnable(BaseRunnable):
@@ -12,9 +13,9 @@ def __init__(self, guard: Guard):
         self.guard = guard

     def _validate(self, input: str) -> OT:
-        response = self.guard.validate(input)
+        response: ValidationOutcome[OT] = self.guard.validate(input)
         validated_output = response.validated_output
-        if not validated_output:
+        if not validated_output or response.validation_passed is False:
             raise ValidationError(
                 (
                     "The response from the LLM failed validation!"
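
Why the extra check matters: with a validator configured with on_fail="noop", a failed validation still returns the original text as validated_output, so the old check on validated_output alone never fired and the failure went unnoticed; only validation_passed records it. A minimal sketch, not part of this commit, assuming the RegexMatch validator is installed from the Guardrails Hub:

# Minimal sketch, not part of this commit: why _validate must also check
# validation_passed when a validator uses on_fail="noop".
from guardrails import Guard
from guardrails.hub import RegexMatch  # assumes this Hub validator is installed

guard = Guard().use(RegexMatch("Ice cream", match_type="search", on_fail="noop"))

outcome = guard.validate("This text never mentions the required phrase.")

# noop leaves the output untouched, so validated_output is still truthy ...
print(outcome.validated_output)   # the original string, unchanged
# ... and only validation_passed records that the validator failed, which is
# the condition the fixed _validate now raises ValidationError on.
print(outcome.validation_passed)  # False
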
@@ -20,7 +20,7 @@ def guard_runnable():
         .use(
             RegexMatch("Ice cream", match_type="search", on_fail="refrain"), on="output"
         )
-        .use(ReadingTime(0.05, on_fail="refrain"))
+        .use(ReadingTime(0.05, on_fail="noop"))
     )
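
For context, a guard like the one built in guard_runnable() above can be dropped into a LangChain chain roughly as follows. This is a hedged sketch, not code from this commit: it assumes GuardRunnable behaves as a standard LangChain Runnable (so it composes with the | operator) and that the Hub validators, langchain-openai, and an OpenAI API key are available.

# Hedged usage sketch, not part of this commit: piping a guard into a
# LangChain chain via GuardRunnable.
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI  # assumes langchain-openai is installed

from guardrails import Guard
from guardrails.hub import ReadingTime, RegexMatch  # assumed installed from the Hub
from guardrails.integrations.langchain.guard_runnable import GuardRunnable

guard = (
    Guard()
    .use(RegexMatch("Ice cream", match_type="search", on_fail="refrain"), on="output")
    .use(ReadingTime(0.05, on_fail="noop"))
)

prompt = ChatPromptTemplate.from_template("Suggest a flavor of {food}.")
chain = prompt | ChatOpenAI() | StrOutputParser() | GuardRunnable(guard)

# With this fix, a ReadingTime failure under on_fail="noop" raises
# ValidationError instead of silently passing unvalidated text downstream.
print(chain.invoke({"food": "ice cream"}))
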
