
Commit 81d1f00
Some refactoring: from_tokens -> encode_from_tokens
comfyanonymous committed Apr 15, 2023
1 parent 719c26c commit 81d1f00
Showing 3 changed files with 9 additions and 9 deletions.
10 changes: 5 additions & 5 deletions comfy/sd.py
@@ -375,13 +375,9 @@ def clip_layer(self, layer_idx):
     def tokenize(self, text, return_word_ids=False):
         return self.tokenizer.tokenize_with_weights(text, return_word_ids)
 
-    def encode(self, text, from_tokens=False):
+    def encode_from_tokens(self, tokens):
         if self.layer_idx is not None:
             self.cond_stage_model.clip_layer(self.layer_idx)
-        if from_tokens:
-            tokens = text
-        else:
-            tokens = self.tokenizer.tokenize_with_weights(text)
         try:
             self.patcher.patch_model()
             cond = self.cond_stage_model.encode_token_weights(tokens)
@@ -391,6 +387,10 @@ def encode(self, text, from_tokens=False):
             raise e
         return cond
 
+    def encode(self, text):
+        tokens = self.tokenizer.tokenize_with_weights(text)
+        return self.encode_from_tokens(tokens)
+
 class VAE:
     def __init__(self, ckpt_path=None, scale_factor=0.18215, device=None, config=None):
         if config is None:
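The diff above splits the old dual-purpose encode into two entry points. A minimal usage sketch of the resulting API, assuming clip is an already-loaded comfy.sd.CLIP instance (the variable name and prompt are illustrative, not part of the commit):

    # assumption: `clip` is an already-loaded comfy.sd.CLIP instance
    cond_a = clip.encode("a photo of a cat")     # tokenize + encode in one call

    tokens = clip.tokenize("a photo of a cat")   # batched (token, weight) pairs
    cond_b = clip.encode_from_tokens(tokens)     # encode pre-built tokens

    # Both paths yield the same conditioning tensor; the split lets callers
    # inspect or edit the token/weight batches before encoding.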
6 changes: 3 additions & 3 deletions comfy/sd1_clip.py
@@ -315,7 +315,7 @@ def tokenize_with_weights(self, text:str, return_word_ids=False):
                     continue
                 #parse word
                 tokens.append([(t, weight) for t in self.tokenizer(word)["input_ids"][1:-1]])
-
+
         #reshape token array to CLIP input size
         batched_tokens = []
         batch = [(self.start_token, 1.0, 0)]
@@ -338,11 +338,11 @@ def tokenize_with_weights(self, text:str, return_word_ids=False):
                         batch.extend([(pad_token, 1.0, 0)] * (remaining_length))
                     #start new batch
                     batch = [(self.start_token, 1.0, 0)]
-                    batched_tokens.append(batch)
+                    batched_tokens.append(batch)
                 else:
                     batch.extend([(t,w,i+1) for t,w in t_group])
                     t_group = []
-
+
         #fill last batch
         batch.extend([(self.end_token, 1.0, 0)] + [(pad_token, 1.0, 0)] * (self.max_length - len(batch) - 1))
 
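The context in this hunk shows the loop that reshapes per-word token groups into fixed-size CLIP inputs: each batch opens with a start token, a word that would overflow the current batch triggers an end token plus padding and a fresh batch, and the final batch is terminated and padded the same way. A simplified, self-contained sketch of that packing scheme (an illustration, not the module's code: weights, word ids, and the oversized-word split are dropped, the token ids are CLIP ViT-L defaults, and each word is assumed to tokenize to at most MAX_LENGTH - 2 ids):

    MAX_LENGTH = 77                       # CLIP context size
    START, END, PAD = 49406, 49407, 49407

    def pack(word_token_groups):
        """Pack per-word token-id lists into MAX_LENGTH batches without
        splitting any word across a batch boundary."""
        batches = []
        batch = [START]
        for group in word_token_groups:
            if len(batch) + len(group) > MAX_LENGTH - 1:  # reserve room for END
                batch.append(END)
                batch.extend([PAD] * (MAX_LENGTH - len(batch)))
                batches.append(batch)
                batch = [START]
            batch.extend(group)
        # fill last batch
        batch.append(END)
        batch.extend([PAD] * (MAX_LENGTH - len(batch)))
        batches.append(batch)
        return batches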
2 changes: 1 addition & 1 deletion comfy/sd2_clip.py
@@ -1,4 +1,4 @@
-import sd1_clip
+from comfy import sd1_clip
 import torch
 import os
 
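The absolute import makes the module resolvable when comfy is imported as a package (e.g. from comfy import sd2_clip), rather than relying on the comfy/ directory itself being on sys.path.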
