Datasets:
Tasks: Image Classification
Sub-tasks: multi-class-image-classification
Languages: English
Size: 1K<n<10K
Tags: biology
License:
update script now generates tar gz

Files changed:
- data/test.tar.gz +3 -0
- data/train.tar.gz +3 -0
- data/valid.tar.gz +3 -0
- update.py +15 -0
data/test.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b8cbb5f6582b5c6686cbf34a1982801b2a8cf6bfe976069725149dd495565576
+size 59634724
data/train.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1ce7f2e124a40ae6a7998d17ef45bb7d92cfa44df6b14e1b3ff1afbc7f5e9b8c
+size 1865954410
data/valid.tar.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:70e08880734bf8e4271c2a67a9b77c76aff657bbd24b53411998299bf0e6494e
+size 59281601
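
Note: the three new .tar.gz entries above are Git LFS pointer files; each records only the sha256 object id and the byte size of the real archive, which LFS fetches separately. The following is a minimal sketch, not part of this commit, of how a locally downloaded copy could be checked against that pointer metadata; the local path data/valid.tar.gz is assumed for illustration.

# Verification sketch (assumed local path, not part of the commit):
# compare a downloaded archive with the LFS pointer's size and sha256 oid.
import hashlib
import os

archive_path = 'data/valid.tar.gz'  # hypothetical local copy of the LFS object
expected_size = 59281601
expected_oid = '70e08880734bf8e4271c2a67a9b77c76aff657bbd24b53411998299bf0e6494e'

# The pointer's "size" line is the byte length of the real archive.
assert os.path.getsize(archive_path) == expected_size

# The pointer's "oid sha256:..." line is the hash of the archive contents.
digest = hashlib.sha256()
with open(archive_path, 'rb') as f:
    for chunk in iter(lambda: f.read(1 << 20), b''):
        digest.update(chunk)
assert digest.hexdigest() == expected_oid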
update.py CHANGED
@@ -1,6 +1,7 @@
 import os
 from kaggle.api.kaggle_api_extended import KaggleApi
 import shutil
+import tarfile
 
 data_dir = 'data/'
 kaggle_api = KaggleApi()
@@ -14,3 +15,17 @@ fault_path = os.path.join(data_dir, 'valid', 'PARAKETT AUKLET')
 correct_path = os.path.join(data_dir, 'valid', 'PARAKETT AUKLET')
 shutil.rmtree(correct_path, ignore_errors=True)
 shutil.move(fault_path, correct_path)
+
+# Compressing the train directory
+with tarfile.open(os.path.join(data_dir, 'train.tar.gz'), 'w:gz') as tar:
+    tar.add(os.path.join(data_dir, 'train'), arcname=os.path.basename(os.path.join(data_dir, 'train')))
+
+# Compressing the test directory
+with tarfile.open(os.path.join(data_dir, 'test.tar.gz'), 'w:gz') as tar:
+    tar.add(os.path.join(data_dir, 'test'), arcname=os.path.basename(os.path.join(data_dir, 'test')))
+
+# Compressing the valid directory
+with tarfile.open(os.path.join(data_dir, 'valid.tar.gz'), 'w:gz') as tar:
+    tar.add(os.path.join(data_dir, 'valid'), arcname=os.path.basename(os.path.join(data_dir, 'valid')))
+
+os.remove(os.path.join(data_dir, 'EfficientNetB0-525-(224 X 224)- 98.97.h5'))
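
Because update.py adds each split with arcname=os.path.basename(...), every archive contains a single top-level folder ('train', 'test', or 'valid'). Below is a minimal sketch, not part of the commit, of how the generated archives could be unpacked again; the extracted/ output directory is an assumption for illustration.

# Extraction sketch (assumed output directory, not part of the commit):
# each tarball holds one top-level folder, so extracting all three side by side
# recreates the train/test/valid layout.
import os
import tarfile

data_dir = 'data/'
output_dir = 'extracted/'  # hypothetical destination

for split in ('train', 'test', 'valid'):
    archive = os.path.join(data_dir, f'{split}.tar.gz')
    with tarfile.open(archive, 'r:gz') as tar:
        # Produces extracted/train, extracted/test, extracted/valid.
        tar.extractall(path=output_dir)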