|
import argparse |
|
import json |
|
import matplotlib.pyplot as plt |
|
import math |
|
import numpy as np |
|
import os |
|
|
|
from scipy import io |
|
from scipy import stats |
|
from tqdm import tqdm |
|
import math |
|
import PIL.Image as Img |
|
import uuid |
|
|
|
def show_img(image, outfile='outfile.png'):
    """Save a flat, [0, 1]-scaled image vector as a square grayscale PNG.

    Args:
        image: 1-D sequence of pixel intensities in [0, 1] whose length is a
            perfect square (e.g. 784 for a 28x28 image) -- assumed layout,
            matching how the rest of this file treats samples.
        outfile: path of the PNG to write. Defaults to 'outfile.png' so
            existing callers are unaffected.
    """
    scaled = np.multiply(image, 255)
    side = int(math.sqrt(len(scaled)))
    # Cast to uint8: PIL cannot build an image from a float64 array (which is
    # what np.multiply yields for json-loaded lists), and grayscale 'L' is
    # 8-bit anyway.
    pixels = scaled.reshape(side, side).astype(np.uint8)
    Img.fromarray(pixels).convert('L').save(outfile)
|
|
|
def gen_data_dict(data_dict, raw_data, group_name):
    """Collect every (image, label) pair from *raw_data* under *group_name*.

    Pixel values are rescaled from [0, 1] to [0, 255] on the way in. The
    assembled list replaces any previous entry at data_dict[group_name];
    the dict is mutated in place and nothing is returned.
    """
    samples = []
    for client_data in raw_data.values():
        samples.extend(
            {'image': np.multiply(pixels, 255), 'label': label}
            for pixels, label in zip(client_data['x'], client_data['y'])
        )
    data_dict[group_name] = samples
|
|
|
def construct_data_dirs_clientwise(raw_data, group_name, data_dir, data_type='train'):
    """Write every sample in *raw_data* to disk as PNGs, grouped per client.

    Images land in
    ``<data_dir>/huggingface/clientwise/<group_name>/<data_type>/<label>/``
    with a random 8-character file name derived from ``uuid1``.

    Args:
        raw_data: mapping of user id -> {'x': [flat images in [0, 1]],
            'y': [labels]} -- assumed schema; matches how load_data passes
            the json 'user_data' field. TODO confirm against the data files.
        group_name: directory name for this client group (e.g. 'client_0').
        data_dir: dataset root directory.
        data_type: 'train' or 'test' sub-directory name.
    """
    data_group_dir = os.path.join(data_dir, 'huggingface', 'clientwise', group_name, data_type)
    # exist_ok avoids the check-then-create race of an exists()/makedirs() pair.
    os.makedirs(data_group_dir, exist_ok=True)

    for value in raw_data.values():
        for img_data, img_label in zip(value['x'], value['y']):
            sample_dir = os.path.join(data_group_dir, str(img_label))
            os.makedirs(sample_dir, exist_ok=True)
            sample_path = os.path.join(sample_dir, str(uuid.uuid1())[:8] + '.png')

            image = np.multiply(img_data, 255)
            side = int(math.sqrt(len(image)))
            # uint8 cast: PIL raises "Cannot handle this data type" on the
            # float64 array np.multiply produces from json-loaded lists.
            image = image.reshape(side, side).astype(np.uint8)
            Img.fromarray(image).convert('L').save(sample_path)
|
|
|
def construct_data_dirs(raw_data, data_dir, data_type='train'):
    """Write every sample in *raw_data* to disk as PNGs in one central tree.

    Images land in ``<data_dir>/huggingface/centralized/<data_type>/<label>/``
    with a random 8-character file name derived from ``uuid1``.

    Args:
        raw_data: mapping of user id -> {'x': [flat images in [0, 1]],
            'y': [labels]} -- assumed schema; matches how load_data passes
            the json 'user_data' field. TODO confirm against the data files.
        data_dir: dataset root directory.
        data_type: 'train' or 'test' sub-directory name.
    """
    data_group_dir = os.path.join(data_dir, 'huggingface', 'centralized', data_type)
    # exist_ok avoids the check-then-create race of an exists()/makedirs() pair.
    os.makedirs(data_group_dir, exist_ok=True)

    for value in raw_data.values():
        for img_data, img_label in zip(value['x'], value['y']):
            sample_dir = os.path.join(data_group_dir, str(img_label))
            os.makedirs(sample_dir, exist_ok=True)
            sample_path = os.path.join(sample_dir, str(uuid.uuid1())[:8] + '.png')

            image = np.multiply(img_data, 255)
            side = int(math.sqrt(len(image)))
            # uint8 cast: PIL raises "Cannot handle this data type" on the
            # float64 array np.multiply produces from json-loaded lists.
            image = image.reshape(side, side).astype(np.uint8)
            Img.fromarray(image).convert('L').save(sample_path)
|
|
|
def _process_split(subdir, data_dir, data_type, desc):
    """Load every client json in *subdir* and materialize its images on disk.

    Shared worker for the train and test halves of load_data.

    Args:
        subdir: directory containing the per-client ``*.json`` files.
        data_dir: dataset root directory, passed through to the image writers.
        data_type: 'train' or 'test', selecting the output sub-directory.
        desc: tqdm progress-bar label.

    Returns:
        (num_samples, data): one total-sample count per json file, and a dict
        mapping 'client_<i>' to that file's samples (see gen_data_dict).
    """
    num_samples = []
    data = {}

    # NOTE(review): client indices follow os.listdir order, which is not
    # sorted -- preserved from the original; confirm callers don't rely on
    # a stable client_<i> <-> file mapping across runs.
    files = [f for f in os.listdir(subdir) if f.endswith('.json')]

    for index, fname in tqdm(enumerate(files), desc=desc, total=len(files)):
        group_name = 'client_' + str(index)
        with open(os.path.join(subdir, fname)) as inf:
            content = json.load(inf)

        num_samples.append(sum(content['num_samples']))
        gen_data_dict(data, content['user_data'], group_name)
        construct_data_dirs_clientwise(content['user_data'], group_name,
                                       data_dir, data_type=data_type)
        construct_data_dirs(content['user_data'], data_dir, data_type=data_type)

    return num_samples, data


def load_data(name):
    """Load the LEAF-style dataset *name* and export its images as PNG trees.

    Reads ``dataset/<name>/{train,test}/*.json`` relative to this file,
    builds in-memory sample dicts, and (as a side effect) writes every image
    under ``dataset/<name>/huggingface/``.

    Args:
        name: dataset directory name, e.g. 'femnist-small'.

    Returns:
        (train_num_samples, train_data, test_num_samples, test_data) where
        the *_num_samples entries hold one per-file sample total and the
        *_data dicts map 'client_<i>' to lists of {'image', 'label'} samples.
    """
    parent_path = os.path.dirname(os.path.realpath(__file__))
    data_dir = os.path.join(parent_path, 'dataset', name)

    train_num_samples, train_data = _process_split(
        os.path.join(data_dir, 'train'), data_dir, 'train',
        'Training Data Generating')
    test_num_samples, test_data = _process_split(
        os.path.join(data_dir, 'test'), data_dir, 'test',
        'Testing Data Generating')

    return train_num_samples, train_data, test_num_samples, test_data
|
|
|
if __name__ == '__main__':
    # Generate the dataset and report basic per-split statistics.
    dataset_name = 'femnist-small'
    (train_num_samples, train_data,
     test_num_samples, test_data) = load_data(dataset_name)

    print('####################################')
    print(f'DATASET: {dataset_name}')
    print(f'{np.sum(train_num_samples)} train samples (total)')
    print(f'{np.sum(test_num_samples)} test samples (total)')
    print(f'{np.mean(train_num_samples):.2f} train samples per user (mean)')
    print(f'{np.mean(test_num_samples):.2f} test samples per user (mean)')
|
|
|
|
|
|