---
title: Mixed data
keywords: fastai
sidebar: home_sidebar
summary: "DataLoader that can take data from multiple dataloaders with different types of data"
description: "DataLoader that can take data from multiple dataloaders with different types of data"
nb_path: "nbs/022_data.mixed.ipynb"
---
{% raw %}
{% endraw %} {% raw %}
{% endraw %} {% raw %}

class MixedDataLoader[source]

MixedDataLoader(*loaders, path='.', shuffle=False, device=None, bs=None)
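A minimal usage sketch, assuming `dl_a` and `dl_b` are two hypothetical fastai `DataLoader`s built over the same items: `MixedDataLoader` draws the same sample indices from every source loader, so the i-th item of each sub-batch refers to the same underlying sample.

```python
# Sketch only: dl_a and dl_b are placeholder DataLoaders created over the same items
mixed_dl = MixedDataLoader(dl_a, dl_b, bs=64, shuffle=True)
xb, yb = first(mixed_dl)  # xb is a tuple with one sub-batch per source loader
```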

{% endraw %} {% raw %}

class MixedDataLoaders[source]

MixedDataLoaders(*loaders, path='.', device=None) :: DataLoaders

Basic wrapper around several DataLoaders.
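Since it subclasses `DataLoaders`, the usual accessors are available. A minimal sketch, with placeholder loader names:

```python
# Sketch only: wrap a train and a valid MixedDataLoader into a single object
mixed_dls = MixedDataLoaders(mixed_train_dl, mixed_valid_dl)
train_dl, valid_dl = mixed_dls.train, mixed_dls.valid  # standard DataLoaders accessors
```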

{% endraw %} {% raw %}
{% endraw %} {% raw %}

get_mixed_dls[source]

get_mixed_dls(*dls, device=None, shuffle_train=None, shuffle_valid=None, **kwargs)
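In practice it is usually simpler to build each `DataLoaders` object separately and combine them with `get_mixed_dls`, as the example below does. A hedged sketch of the call, with `dls_a` and `dls_b` as placeholders for two `DataLoaders` built on the same splits:

```python
# Sketch only: pair the train/valid loaders of two DataLoaders objects;
# shuffle_train / shuffle_valid override the default shuffling of the resulting loaders
mixed_dls = get_mixed_dls(dls_a, dls_b, bs=64, shuffle_train=True, shuffle_valid=False)
```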

{% endraw %} {% raw %}
{% endraw %} {% raw %}
from tsai.data.tabular import *

path = untar_data(URLs.ADULT_SAMPLE)
df = pd.read_csv(path/'adult.csv')
# df['salary'] = np.random.rand(len(df)) # uncomment to simulate a cont dependent variable
target = 'salary'
splits = RandomSplitter()(range_of(df))

cat_names = ['workclass', 'education', 'marital-status']
cont_names = ['age', 'fnlwgt']
dls1 = get_tabular_dls(df, cat_names=cat_names, cont_names=cont_names, y_names=target, splits=splits, bs=512)
dls1.show_batch()

cat_names = None #['occupation', 'relationship', 'race']
cont_names = ['education-num']
dls2 = get_tabular_dls(df, cat_names=cat_names, cont_names=cont_names, y_names=target, splits=splits, bs=128)
dls2.show_batch()
| | workclass | education | marital-status | age | fnlwgt | salary |
|---|---|---|---|---|---|---|
| 0 | ? | Some-college | Married-civ-spouse | 62.999999 | 149697.998687 | <50k |
| 1 | Private | 5th-6th | Separated | 36.000000 | 177616.000313 | <50k |
| 2 | Local-gov | Some-college | Separated | 30.000000 | 178383.000379 | <50k |
| 3 | Self-emp-not-inc | Some-college | Married-civ-spouse | 27.000000 | 411950.011190 | <50k |
| 4 | Private | Bachelors | Married-civ-spouse | 37.000000 | 192938.999932 | >=50k |
| 5 | Private | Masters | Divorced | 54.000000 | 161691.000074 | >=50k |
| 6 | Private | HS-grad | Married-civ-spouse | 36.000000 | 95336.001179 | >=50k |
| 7 | State-gov | HS-grad | Married-civ-spouse | 46.000000 | 273770.997233 | <50k |
| 8 | Self-emp-not-inc | HS-grad | Married-civ-spouse | 68.000001 | 197015.000115 | <50k |
| 9 | Self-emp-not-inc | Some-college | Married-civ-spouse | 28.000000 | 149323.999684 | <50k |

| | education-num_na | education-num | salary |
|---|---|---|---|
| 0 | False | 10.0 | <50k |
| 1 | False | 9.0 | <50k |
| 2 | False | 9.0 | >=50k |
| 3 | False | 10.0 | <50k |
| 4 | False | 13.0 | >=50k |
| 5 | False | 13.0 | <50k |
| 6 | False | 10.0 | <50k |
| 7 | False | 11.0 | <50k |
| 8 | False | 12.0 | >=50k |
| 9 | False | 10.0 | <50k |
{% endraw %} {% raw %}
dls = get_mixed_dls(dls1, dls2, bs=8)
first(dls.train)
first(dls.valid)
torch.save(dls,'export/mixed_dls.pth')
del dls
dls = torch.load('export/mixed_dls.pth')
dls.train.show_batch()
| | workclass | education | marital-status | age | fnlwgt | salary |
|---|---|---|---|---|---|---|
| 0 | Self-emp-not-inc | HS-grad | Never-married | 19.000000 | 137577.998451 | <50k |
| 1 | Private | HS-grad | Never-married | 23.000001 | 199884.000276 | <50k |
| 2 | Self-emp-not-inc | Prof-school | Married-civ-spouse | 52.999999 | 33303.999880 | >=50k |
| 3 | Private | 10th | Never-married | 28.000000 | 204516.000215 | <50k |
| 4 | Private | 10th | Never-married | 28.000000 | 412148.999546 | <50k |
| 5 | Self-emp-not-inc | Bachelors | Married-civ-spouse | 57.999999 | 310013.997374 | <50k |
| 6 | State-gov | HS-grad | Never-married | 35.000000 | 237873.000453 | <50k |
| 7 | Private | Bachelors | Married-civ-spouse | 37.000000 | 178948.000389 | >=50k |

| | education-num_na | education-num | salary |
|---|---|---|---|
| 0 | False | 9.0 | <50k |
| 1 | False | 9.0 | <50k |
| 2 | False | 15.0 | >=50k |
| 3 | False | 6.0 | <50k |
| 4 | False | 6.0 | <50k |
| 5 | False | 13.0 | <50k |
| 6 | False | 9.0 | <50k |
| 7 | False | 13.0 | >=50k |
{% endraw %} {% raw %}
xb, yb = first(dls.train)
xb
((tensor([[ 7, 12,  5],
          [ 5, 12,  5],
          [ 7, 15,  3],
          [ 5,  1,  5],
          [ 5,  1,  5],
          [ 7, 10,  3],
          [ 8, 12,  5],
          [ 5, 10,  3]]),
  tensor([[-1.4394, -0.4971],
          [-1.1456,  0.0957],
          [ 1.0581, -1.4893],
          [-0.7783,  0.1397],
          [-0.7783,  2.1153],
          [ 1.4254,  1.1435],
          [-0.2641,  0.4571],
          [-0.1172, -0.1035]])),
 (tensor([[1],
          [1],
          [1],
          [1],
          [1],
          [1],
          [1],
          [1]]),
  tensor([[-0.4232],
          [-0.4232],
          [ 1.9228],
          [-1.5961],
          [-1.5961],
          [ 1.1408],
          [-0.4232],
          [ 1.1408]])))
{% endraw %} {% raw %}
xs, ys = first(dls.train)
xs[0][0].shape, xs[0][1].shape, xs[1][0].shape, xs[1][1].shape
(torch.Size([8, 3]),
 torch.Size([8, 2]),
 torch.Size([8, 1]),
 torch.Size([8, 1]))
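The nesting mirrors the source dataloaders: `xs[0]` is the (categorical, continuous) pair from `dls1` (3 categorical and 2 continuous columns) and `xs[1]` is the pair from `dls2` (the `education-num_na` flag plus 1 continuous column), so a batch can be unpacked accordingly:

```python
# Unpack the nested batch: one (cats, conts) pair per source dataloader
(cats1, conts1), (cats2, conts2) = xs
cats1.shape, conts1.shape, cats2.shape, conts2.shape
# -> (torch.Size([8, 3]), torch.Size([8, 2]), torch.Size([8, 1]), torch.Size([8, 1]))
```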
{% endraw %} {% raw %}
import string

from tsai.data.validation import TimeSplitter
from tsai.data.core import TSRegression, get_ts_dls
X = np.repeat(np.repeat(np.arange(8)[:, None, None], 2, 1), 5, 2).astype(float)
X = np.concatenate([X, X])
y = np.concatenate([np.arange(len(X)//2)]*2)
alphabet = np.array(list(string.ascii_lowercase))
# y = alphabet[y]
splits = TimeSplitter(.5, show_plot=False)(range_of(X))
tfms = [None, TSRegression()]
dls1 = get_ts_dls(X, y, splits=splits, tfms=tfms)
dls1.one_batch()
(TSTensor(samples:8, vars:2, len:5, device=cpu),
 tensor([0., 1., 2., 3., 4., 5., 6., 7.]))
{% endraw %} {% raw %}
data = np.concatenate([np.repeat(np.arange(8)[:, None], 3, 1)*np.array([1, 10, 100])]*2)
df = pd.DataFrame(data, columns=['cat1', 'cat2', 'cont'])
df['cont'] = df['cont'].astype(float)
df['target'] = y
cat_names = ['cat1', 'cat2']
cont_names = ['cont']
target = 'target'
dls2 = get_tabular_dls(df, procs=[Categorify, FillMissing],  # Normalize intentionally omitted
                       cat_names=cat_names, cont_names=cont_names, y_names=target, splits=splits, bs=8)
dls2.one_batch()
(tensor([[7, 7],
         [1, 1],
         [8, 8],
         [6, 6],
         [4, 4],
         [2, 2],
         [5, 5],
         [3, 3]]),
 tensor([[600.],
         [  0.],
         [700.],
         [500.],
         [300.],
         [100.],
         [400.],
         [200.]]),
 tensor([[6],
         [0],
         [7],
         [5],
         [3],
         [1],
         [4],
         [2]], dtype=torch.int8))
{% endraw %} {% raw %}
# fastai internal: _loaders picks the single- or multi-process torch iterator based on num_workers;
# this just shows the raw batches produced from dls1.train's fake_l
z = zip(_loaders[dls1.train.fake_l.num_workers == 0](dls1.train.fake_l))
for b in z: 
    print(b)
    break
((TSTensor(samples:8, vars:2, len:5, device=cpu), tensor([0., 1., 2., 3., 4., 5., 6., 7.])),)
{% endraw %} {% raw %}
bs = 8
dls = get_mixed_dls(dls1, dls2, bs=bs)
dl = dls.train
xb, yb = dl.one_batch()
test_eq(len(xb), 2)
test_eq(len(xb[0]), bs)
test_eq(len(xb[1]), 2)
test_eq(len(xb[1][0]), bs)
test_eq(len(xb[1][1]), bs)
test_eq(xb[0].data[:, 0, 0].long(), xb[1][0][:, 0] - 1) # categorical data and ts are in sync
test_eq(xb[0].data[:, 0, 0], (xb[1][1]/100).flatten()) # continuous data and ts are in sync
test_eq(tensor(dl.input_idxs), yb.long().cpu())
dl = dls.valid
xb, yb = dl.one_batch()
test_eq(tensor(y[dl.input_idxs]), yb.long().cpu())
{% endraw %}
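A model consuming these mixed batches just needs one input per source dataloader. The sketch below is purely illustrative (the module and its layer sizes are assumptions, not part of tsai); it simply unpacks the `(time series, (categorical, continuous))` structure produced above.

```python
import torch
import torch.nn as nn

class HypotheticalMixedModel(nn.Module):
    "Illustrative only: one branch per source dataloader, concatenated before the head."
    def __init__(self, ts_feats=16, tab_feats=16, n_out=1):
        super().__init__()
        self.ts_branch  = nn.LazyLinear(ts_feats)   # flattened time-series branch
        self.tab_branch = nn.LazyLinear(tab_feats)  # tabular branch (cats + conts)
        self.head       = nn.Linear(ts_feats + tab_feats, n_out)

    def forward(self, x_ts, x_tab):
        x_cat, x_cont = x_tab                             # tabular sub-batch is (cats, conts)
        f_ts  = self.ts_branch(x_ts.float().flatten(1))   # [bs, vars*len] -> [bs, ts_feats]
        f_tab = self.tab_branch(torch.cat([x_cat.float(), x_cont], dim=1))
        return self.head(torch.cat([f_ts, f_tab], dim=1))

model = HypotheticalMixedModel()
preds = model(*xb)  # xb = (TSTensor batch, (cats, conts) batch) from the mixed dls above
```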