Latest commit: Update README.md (cd2dac3, verified)

CSD/ · init
examples/ · README tutorial.
1.52 kB · initial commit
37 Bytes · example usage
10.3 kB · Update README.md
62 Bytes · Push model using huggingface_hub.
csd-vit-l.pth · 2.44 GB · init
Detected Pickle imports (7): numpy.dtype, torch._utils._rebuild_tensor_v2, argparse.Namespace, collections.OrderedDict, _codecs.encode, torch.FloatStorage, numpy.core.multiarray.scalar
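Hugging Face flags these imports because a `.pth` checkpoint is a pickle, and unpickling can execute arbitrary code. A minimal sketch of a more defensive load, assuming PyTorch 2.4+ (for `add_safe_globals`) and the NumPy 1.x module paths shown in the scan above:

```python
import argparse

import numpy as np
import torch

# torch.load(weights_only=True) uses a restricted unpickler. The scan above
# shows this checkpoint also pickles argparse.Namespace and numpy
# dtype/scalar objects, which are not on the default allowlist, so they are
# allowlisted explicitly instead of falling back to a full, unsafe load.
torch.serialization.add_safe_globals(
    [argparse.Namespace, np.dtype, np.core.multiarray.scalar]
)

checkpoint = torch.load("csd-vit-l.pth", map_location="cpu", weights_only=True)
print(type(checkpoint))
```

If the load still raises an UnpicklingError naming another global, that is the allowlist doing its job; extend it only with entries you trust.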
611 MB · tensors
610 MB · tensors
610 MB · tensors
csd_clip_model_torchscript.pt · 611 MB · tensors
Detected Pickle imports (235): TorchScript-mangled module classes, namely the top-level CSD_CLIP, the clip.model components (VisionTransformer, Transformer, ResidualAttentionBlock, LayerNorm, QuickGELU), and the torch.nn modules (MultiheadAttention, Linear, NonDynamicallyQuantizableLinear, Sequential, Conv2d), plus collections.OrderedDict, torch._utils._rebuild_tensor_v2, torch.jit._pickle.restore_type_tag, and the torch.FloatStorage, torch.HalfStorage, torch.LongStorage, and torch.DoubleStorage storage types.
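A scan result this long is normal for a scripted model: the imports are the serialized class hierarchy of the module itself rather than a plain state dict. TorchScript archives are loaded through `torch.jit.load`, which uses its own restricted unpickler rather than Python's pickle module. A minimal sketch, assuming only that the file is a standard TorchScript export:

```python
import torch

# TorchScript bundles the compiled module code with the weights, so the
# scripted CSD_CLIP model loads without any of the original Python classes.
model = torch.jit.load("csd_clip_model_torchscript.pt", map_location="cpu")
model.eval()

# Inspect what was restored; the mangled names from the scan correspond to
# the submodules visible here.
for name, _ in list(model.named_modules())[:5]:
    print(name)
```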
610 MB · Push model using huggingface_hub.
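"Push model using huggingface_hub." matches the default commit message written by the hub client's model-push helpers (e.g. `ModelHubMixin.push_to_hub`). For reference, a minimal sketch of producing such a commit; the repo id and file name are placeholders, not taken from this listing:

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token stored by `huggingface-cli login`
api.upload_file(
    path_or_fileobj="model.safetensors",   # placeholder local file
    path_in_repo="model.safetensors",      # placeholder path inside the repo
    repo_id="your-username/CSD",           # placeholder repo id
    commit_message="Push model using huggingface_hub.",
)
```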