Keras fit_generator issue
I followed this tutorial to create a custom generator for my Keras model. Here is an MWE that shows the issues I'm facing:
import sys, keras
import numpy as np
import tensorflow as tf
import pandas as pd
from keras.models import Model
from keras.layers import Dense, Input
from keras.optimizers import Adam
from keras.losses import binary_crossentropy

class DataGenerator(keras.utils.Sequence):
    'Generates data for Keras'
    def __init__(self, list_IDs, batch_size, shuffle=False):
        'Initialization'
        self.batch_size = batch_size
        self.list_IDs = list_IDs
        self.shuffle = shuffle
        self.on_epoch_end()

    def __len__(self):
        'Denotes the number of batches per epoch'
        return int(np.floor(len(self.list_IDs) / self.batch_size))

    def __getitem__(self, index):
        'Generate one batch of data'
        # Generate indexes of the batch
        #print('self.batch_size: ', self.batch_size)
        print('index: ', index)
        sys.exit()

    def on_epoch_end(self):
        'Updates indexes after each epoch'
        self.indexes = np.arange(len(self.list_IDs))
        print('self.indexes: ', self.indexes)
        if self.shuffle == True:
            np.random.shuffle(self.indexes)

    def __data_generation(self, list_IDs_temp):
        'Generates data containing batch_size samples'  # X : (n_samples, *dim, n_channels)
        X1 = np.empty((self.batch_size, 10), dtype=float)
        X2 = np.empty((self.batch_size, 12), dtype=int)
        # Generate data
        for i, ID in enumerate(list_IDs_temp):
            print('i is: ', i, 'ID is: ', ID)
            # Preprocess this sample (omitted)
            X1[i,] = np.repeat(1, X1.shape[1])
            X2[i,] = np.repeat(2, X2.shape[1])
        Y = X1[:,:-1]
        return X1, X2, Y

if __name__ == '__main__':
    train_ids_to_use = list(np.arange(1, 321))    # 1, 2, ..., 320
    valid_ids_to_use = list(np.arange(321, 481))  # 321, 322, ..., 480
    params = {'batch_size': 32}

    train_generator = DataGenerator(train_ids_to_use, **params)
    valid_generator = DataGenerator(valid_ids_to_use, **params)

    # Build a toy model
    input_1 = Input(shape=(3, 10))
    input_2 = Input(shape=(3, 12))
    y_input = Input(shape=(3, 10))
    concat_1 = keras.layers.concatenate([input_1, input_2])
    concat_2 = keras.layers.concatenate([concat_1, y_input])
    dense_1 = Dense(10, activation='relu')(concat_2)
    output_1 = Dense(10, activation='sigmoid')(dense_1)
    model = Model([input_1, input_2, y_input], output_1)
    print(model.summary())

    # Compile and fit_generator
    model.compile(optimizer=Adam(lr=0.001), loss=binary_crossentropy)
    model.fit_generator(generator=train_generator, validation_data=valid_generator, epochs=2, verbose=2)
I don't want to shuffle my input data. I thought that was being handled, but when I print out index in __getitem__, I get random numbers rather than the consecutive numbers I would expect. Notice I'm deliberately killing the process with sys.exit inside __getitem__ to see what's going on.

My questions:

1. Why does index not go consecutively? How can I fix this?
2. When I run this in the terminal using screen, why doesn't it respond to Ctrl+C?
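For reference, the tutorial's pattern for __getitem__ would use index to take one consecutive slice of self.indexes and hand those IDs to __data_generation. A sketch of that method inside DataGenerator (not the truncated version above; the preprocessing step stays omitted, and the exact return structure is up to the model) would look roughly like:

    def __getitem__(self, index):
        'Generate one batch of data (tutorial-style sketch)'
        # index selects the index-th consecutive block of sample indexes
        indexes = self.indexes[index * self.batch_size:(index + 1) * self.batch_size]
        list_IDs_temp = [self.list_IDs[k] for k in indexes]
        X1, X2, Y = self.__data_generation(list_IDs_temp)
        # here the three arrays would feed the model's three Inputs, with Y also as the target
        return [X1, X2, Y], Y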
Tags: python, keras, generator
I think you can achieve that by passing shuffle=False to the fit_generator method? – today, Nov 25 '18 at 18:43

Hi, thanks for your reply. I did that as a default in __init__, and I tested later whether the index values were getting shuffled by the if statement in on_epoch_end. I found that the body of the if statement did not get executed, which I think means that shuffle is indeed False. – StatsSorceress, Nov 25 '18 at 18:48

You want the batch indices to be generated consecutively, right? That's what the shuffle=False argument of fit_generator is for. Have you tried it? – today, Nov 25 '18 at 18:49

Yes, please see the above comment. – StatsSorceress, Nov 25 '18 at 18:50

Sorry, I don't get that. On my machine, when I set shuffle=False in the fit_generator call (not in the __init__ method), I get consecutive indices. – today, Nov 25 '18 at 18:53
asked Nov 23 '18 at 2:54 by StatsSorceress; edited Nov 25 '18 at 19:16 by today
1 Answer
You can use the shuffle argument of the fit_generator method to generate batches consecutively. From the fit_generator() documentation:

shuffle: Boolean. Whether to shuffle the order of the batches at the beginning of each epoch. Only used with instances of Sequence (keras.utils.Sequence). Has no effect when steps_per_epoch is not None.

Just pass shuffle=False to fit_generator:

model.fit_generator(generator=train_generator, shuffle=False, ...)
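As a quick sanity check, here is a minimal, self-contained sketch (the toy model and data are invented for illustration, assuming the same standalone Keras 2.x API as in the question) that prints the batch index from __getitem__; with shuffle=False the indices come out as 0, 1, 2, ...:

import numpy as np
import keras
from keras.models import Model
from keras.layers import Dense, Input

class ToySequence(keras.utils.Sequence):
    def __init__(self, n_samples=320, batch_size=32):
        self.n_samples = n_samples
        self.batch_size = batch_size

    def __len__(self):
        # number of batches per epoch
        return self.n_samples // self.batch_size

    def __getitem__(self, index):
        # with shuffle=False in fit_generator, index arrives as 0, 1, 2, ...
        print('batch index:', index)
        X = np.ones((self.batch_size, 10))
        y = np.zeros((self.batch_size, 1))
        return X, y

inp = Input(shape=(10,))
out = Dense(1, activation='sigmoid')(inp)
model = Model(inp, out)
model.compile(optimizer='adam', loss='binary_crossentropy')

# shuffle=False here is what keeps the batch order consecutive each epoch
model.fit_generator(generator=ToySequence(), shuffle=False, epochs=1, verbose=0)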
Okay, please let me check my understanding: fit_generator is not being overridden, it belongs to Keras and has its own parameters. What I'm doing with DataGenerator is creating my own generators (train_generator and valid_generator) to create slices of the data for Keras to use. But why does that mean I need to specify shuffle=False in the call to fit_generator, instead of in my own generators? – StatsSorceress, Nov 25 '18 at 19:07

@StatsSorceress fit_generator has been implemented such that it calls the __getitem__ method of the Sequence with a given index value. So it is fit_generator that gives you the index of the batch to be generated. Passing shuffle=False to fit_generator forces it to give batch indices in order, i.e. 0, 1, 2, 3, ... – today, Nov 25 '18 at 19:12
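To make the comment above concrete, the loop that fit_generator (via its enqueuer) runs over your Sequence is, very roughly, the following; this is a simplified sketch of the control flow, not the actual Keras source:

import numpy as np

def run_one_epoch(sequence, shuffle=True):
    # fit_generator decides the batch order, not the Sequence itself
    batch_order = np.arange(len(sequence))   # uses the Sequence's __len__
    if shuffle:
        np.random.shuffle(batch_order)       # this is what shuffle=True randomizes
    for index in batch_order:
        batch = sequence[index]              # this call is your __getitem__(index)
        # ... training on the batch would happen here ...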
answered Nov 25 '18 at 19:05 by today; edited Nov 25 '18 at 19:07