
Commit

Merge pull request #309 from HazyResearch/bugfix/flash-fill-max-token
Bugfix/flash fill max token
seyuboglu authored Mar 18, 2023
2 parents bfd44e0 + bb6de15 commit a0564c6
Showing 1 changed file with 1 addition and 2 deletions.
@@ -25,8 +25,6 @@ def __init__(
         manifest_cache_dir: str = "~/.cache/manifest",
         max_tokens: int = 1,
     ):
-        self.max_tokens = max_tokens
-
         df = df.view()
         if target_column not in df.columns:
             df[target_column] = ""
@@ -41,6 +39,7 @@ def __init__(
             os.path.expanduser(manifest_cache_dir)
         )
         os.makedirs(self.manifest_cache_dir, exist_ok=True)
+        self.max_tokens = max_tokens

     @property
     def prompt(self):
