---
title: Mixed data
keywords: fastai
sidebar: home_sidebar
summary: "DataLoader that can take data from multiple dataloaders with different types of data"
description: "DataLoader that can take data from multiple dataloaders with different types of data"
nb_path: "nbs/022_data.mixed.ipynb"
---
{% raw %}
{% endraw %} {% raw %}
{% endraw %} {% raw %}

class MixedDataLoader[source]

MixedDataLoader(*loaders, path='.', shuffle=False, device=None, bs=None)
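`MixedDataLoader` draws one batch from each of its source loaders at every step and keeps the samples row-aligned across sources. A minimal sketch, assuming `dls1` and `dls2` are the tabular `DataLoaders` built in the cells below (`bs=64` is just an illustrative value; `get_mixed_dls` below is the more convenient entry point):

```python
# Sketch only: combine the training loaders of two DataLoaders built on the same splits.
# Each batch yields one tuple of inputs per source, with the rows kept aligned.
mixed_train_dl = MixedDataLoader(dls1.train, dls2.train, shuffle=True, bs=64)
xb, yb = first(mixed_train_dl)
```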

{% endraw %} {% raw %}

class MixedDataLoaders[source]

MixedDataLoaders(*loaders, path='.', device=None) :: DataLoaders

Basic wrapper around several DataLoaders.
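`MixedDataLoaders` behaves like a standard fastai `DataLoaders` whose `train` and `valid` loaders are `MixedDataLoader` objects. In practice you will usually build one with `get_mixed_dls` (below) rather than instantiating it directly.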

{% endraw %} {% raw %}
{% endraw %} {% raw %}

get_mixed_dls[source]

get_mixed_dls(*dls, device=None, shuffle_train=None, shuffle_valid=None, **kwargs)
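`get_mixed_dls` is the convenience constructor: pass it two or more `DataLoaders` built on the same splits (plus an optional common `bs`) and it returns a `MixedDataLoaders` whose batches contain one set of inputs per source, with the samples kept aligned across sources. The examples below show this first with two tabular `DataLoaders`, and then with a time series plus a tabular one.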

{% endraw %} {% raw %}
{% endraw %} {% raw %}
from tsai.data.tabular import *

path = untar_data(URLs.ADULT_SAMPLE)
df = pd.read_csv(path/'adult.csv')
# df['salary'] = np.random.rand(len(df)) # uncomment to simulate a cont dependent variable
target = 'salary'
splits = RandomSplitter()(range_of(df))

cat_names = ['workclass', 'education', 'marital-status']
cont_names = ['age', 'fnlwgt']
dls1 = get_tabular_dls(df, cat_names=cat_names, cont_names=cont_names, y_names=target, splits=splits, bs=512)
dls1.show_batch()

cat_names = None #['occupation', 'relationship', 'race']
cont_names = ['education-num']
dls2 = get_tabular_dls(df, cat_names=cat_names, cont_names=cont_names, y_names=target, splits=splits, bs=128)
dls2.show_batch()
|    | workclass        | education    | marital-status     |       age |        fnlwgt | salary |
|---:|------------------|--------------|--------------------|----------:|--------------:|--------|
|  0 | Private          | Some-college | Never-married      | 27.000000 | 180262.000109 | <50k   |
|  1 | Self-emp-not-inc | HS-grad      | Married-civ-spouse | 59.000000 |  32551.997386 | <50k   |
|  2 | Local-gov        | Masters      | Married-civ-spouse | 28.000000 | 168524.000132 | >=50k  |
|  3 | Private          | Some-college | Divorced           | 49.000000 |  50567.005612 | <50k   |
|  4 | State-gov        | HS-grad      | Divorced           | 58.000000 | 300622.997833 | <50k   |
|  5 | Private          | Some-college | Married-civ-spouse | 32.000000 | 188246.000002 | >=50k  |
|  6 | ?                | Some-college | Widowed            | 65.999999 | 186061.000146 | <50k   |
|  7 | Private          | HS-grad      | Never-married      | 22.000001 | 222992.998895 | <50k   |
|  8 | Private          | Bachelors    | Never-married      | 23.000000 | 210443.000744 | <50k   |
|  9 | Self-emp-not-inc | Some-college | Never-married      | 27.000000 |  70656.994661 | <50k   |

|    | education-num_na | education-num | salary |
|---:|------------------|--------------:|--------|
|  0 | False            |           9.0 | <50k   |
|  1 | False            |          13.0 | <50k   |
|  2 | False            |          14.0 | <50k   |
|  3 | False            |           9.0 | <50k   |
|  4 | False            |           9.0 | >=50k  |
|  5 | False            |           9.0 | <50k   |
|  6 | False            |          10.0 | <50k   |
|  7 | False            |          13.0 | <50k   |
|  8 | False            |          13.0 | <50k   |
|  9 | False            |           9.0 | >=50k  |
{% endraw %} {% raw %}
dls = get_mixed_dls(dls1, dls2, bs=8)  # combine both tabular DataLoaders with a common batch size
first(dls.train)   # materialize a batch from each split to check the loaders work
first(dls.valid)
torch.save(dls, 'export/mixed_dls.pth')  # a MixedDataLoaders object can be saved and reloaded
del dls
dls = torch.load('export/mixed_dls.pth')
dls.train.show_batch()
|    | workclass        | education    | marital-status     |       age |        fnlwgt | salary |
|---:|------------------|--------------|--------------------|----------:|--------------:|--------|
|  0 | Self-emp-inc     | HS-grad      | Married-civ-spouse | 61.000000 |  84408.999910 | >=50k  |
|  1 | Local-gov        | HS-grad      | Married-civ-spouse | 68.999999 | 197288.000325 | <50k   |
|  2 | ?                | Prof-school  | Divorced           | 68.999999 | 259323.001677 | <50k   |
|  3 | Private          | Some-college | Divorced           | 63.000000 | 339472.998848 | <50k   |
|  4 | Self-emp-not-inc | Some-college | Married-civ-spouse | 60.000001 | 264313.999119 | <50k   |
|  5 | Private          | Bachelors    | Divorced           | 58.000000 | 142325.999530 | <50k   |
|  6 | Private          | Some-college | Never-married      | 22.000001 | 189924.000004 | <50k   |
|  7 | ?                | Some-college | Never-married      | 20.000001 | 114357.000798 | <50k   |

|    | education-num_na | education-num | salary |
|---:|------------------|--------------:|--------|
|  0 | False            |           9.0 | >=50k  |
|  1 | False            |           9.0 | <50k   |
|  2 | False            |          15.0 | <50k   |
|  3 | False            |          10.0 | <50k   |
|  4 | False            |          10.0 | <50k   |
|  5 | False            |          13.0 | <50k   |
|  6 | False            |          10.0 | <50k   |
|  7 | False            |          10.0 | <50k   |
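After reloading the saved `MixedDataLoaders`, `show_batch` still decodes and displays one table per source `DataLoaders`.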
{% endraw %} {% raw %}
xb, yb = first(dls.train)
xb
((tensor([[ 6, 12,  3],
          [ 3, 12,  3],
          [ 1, 15,  1],
          [ 5, 16,  1],
          [ 7, 16,  3],
          [ 5, 10,  1],
          [ 5, 16,  5],
          [ 1, 16,  5]]),
  tensor([[ 1.6390, -0.9974],
          [ 2.2258,  0.0738],
          [ 2.2258,  0.6624],
          [ 1.7857,  1.4230],
          [ 1.5657,  0.7098],
          [ 1.4190, -0.4478],
          [-1.2215,  0.0039],
          [-1.3682, -0.7132]])),
 (tensor([[1],
          [1],
          [1],
          [1],
          [1],
          [1],
          [1],
          [1]]),
  tensor([[-0.4260],
          [-0.4260],
          [ 1.9216],
          [-0.0347],
          [-0.0347],
          [ 1.1390],
          [-0.0347],
          [-0.0347]])))
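The batch inputs form one tuple per source: the first holds the categorical (`workclass`, `education`, `marital-status`) and normalized continuous (`age`, `fnlwgt`) tensors from `dls1`; the second holds the `education-num_na` flag added by `FillMissing` and the normalized `education-num` column from `dls2`.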
{% endraw %} {% raw %}
xs, ys = first(dls.train)
xs[0][0].shape, xs[0][1].shape, xs[1][0].shape, xs[1][1].shape
(torch.Size([8, 3]),
 torch.Size([8, 2]),
 torch.Size([8, 1]),
 torch.Size([8, 1]))
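The shapes confirm this structure: 3 categorical and 2 continuous features from `dls1`, and 1 categorical plus 1 continuous feature from `dls2`, all with a batch size of 8.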
{% endraw %} {% raw %}
import string  # needed for string.ascii_lowercase below

from tsai.data.validation import TimeSplitter
from tsai.data.core import TSRegression, get_ts_dls
X = np.repeat(np.repeat(np.arange(8)[:, None, None], 2, 1), 5, 2).astype(float)
X = np.concatenate([X, X])
y = np.concatenate([np.arange(len(X)//2)]*2)
alphabet = np.array(list(string.ascii_lowercase))
# y = alphabet[y]
splits = TimeSplitter(.5, show_plot=False)(range_of(X))
tfms = [None, TSRegression()]
dls1 = get_ts_dls(X, y, splits=splits, tfms=tfms)
dls1.one_batch()
(TSTensor(samples:8, vars:2, len:5, device=cpu),
 tensor([0., 1., 2., 3., 4., 5., 6., 7.]))
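The synthetic data is built so that every value of a sample equals its target; this makes it easy to verify below that the time series, categorical and continuous rows stay aligned in the mixed batches.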
{% endraw %} {% raw %}
data = np.concatenate([np.repeat(np.arange(8)[:, None], 3, 1)*np.array([1, 10, 100])]*2)
df = pd.DataFrame(data, columns=['cat1', 'cat2', 'cont'])
df['cont'] = df['cont'].astype(float)
df['target'] = y
cat_names = ['cat1', 'cat2']
cont_names = ['cont']
target = 'target'
dls2 = get_tabular_dls(df, procs=[Categorify, FillMissing],  # Normalize intentionally left out
                       cat_names=cat_names, cont_names=cont_names, y_names=target, splits=splits, bs=8)
dls2.one_batch()
(tensor([[2, 2],
         [4, 4],
         [6, 6],
         [8, 8],
         [7, 7],
         [1, 1],
         [3, 3],
         [5, 5]]),
 tensor([[100.],
         [300.],
         [500.],
         [700.],
         [600.],
         [  0.],
         [200.],
         [400.]]),
 tensor([[1],
         [3],
         [5],
         [7],
         [6],
         [0],
         [2],
         [4]], dtype=torch.int8))
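The tabular frame mirrors the same indices (`cat1 = i`, `cat2 = 10*i`, `cont = 100*i`). Note that `Categorify` maps the raw values to 1-based category codes (as the output above shows), which is why the alignment test below compares `xb[1][0][:, 0] - 1` against the time series values.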
{% endraw %} {% raw %}
from fastai.data.load import _loaders  # fastai's tuple of torch DataLoader iterator classes

# Iterate dls1's underlying fake loader directly, which is essentially how MixedDataLoader
# consumes its child loaders
z = zip(_loaders[dls1.train.fake_l.num_workers == 0](dls1.train.fake_l))
for b in z:
    print(b)
    break
((TSTensor(samples:8, vars:2, len:5, device=cpu), tensor([0., 1., 2., 3., 4., 5., 6., 7.])),)
{% endraw %} {% raw %}
bs = 8
dls = get_mixed_dls(dls1, dls2, bs=bs)
dl = dls.train
xb, yb = dl.one_batch()
test_eq(len(xb), 2)
test_eq(len(xb[0]), bs)
test_eq(len(xb[1]), 2)
test_eq(len(xb[1][0]), bs)
test_eq(len(xb[1][1]), bs)
test_eq(xb[0].data[:, 0, 0].long(), xb[1][0][:, 0] - 1) # categorical data and ts are in sync
test_eq(xb[0].data[:, 0, 0], (xb[1][1]/100).flatten()) # continuous data and ts are in sync
test_eq(tensor(dl.input_idxs), yb.long().cpu())
dl = dls.valid
xb, yb = dl.one_batch()
test_eq(tensor(y[dl.input_idxs]), yb.long().cpu())
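These tests check that every row in a mixed batch stays aligned across the sources (the time series values match both the categorical codes and the scaled continuous values from the tabular loader) and that `dl.input_idxs` records which rows of the original data were used, so the targets can be recovered from it.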
{% endraw %}