In [1]: # Import libraries
from tensorflow import keras
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.optimizers import RMSprop
In [2]: import keras
keras.__version__

Out[2]: '2.5.0'
In [3]: # Load the dataset
from keras.datasets import imdb
(train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)

<__array_function__ internals>:5: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray
C:\Users\walee\anaconda3\lib\site-packages\keras\datasets\imdb.py:155: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray
  x_train, y_train = np.array(xs[:idx]), np.array(labels[:idx])
C:\Users\walee\anaconda3\lib\site-packages\keras\datasets\imdb.py:156: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray
  x_test, y_test = np.array(xs[idx:]), np.array(labels[idx:])

In [4]: train_data[0]

Out[4]:
[1, 14, 22, 16, 43, 530, 973, 1622, 1385, 65, 458, 4468, 66, 3941, 4, 173, 36,
 256, 5, 25, 100, 43, 838, 112, 50, 670, 2, 9, 35, 480, 284, 5, 150, 4, 172,
 112, 167, 2, 336, 385, 39, 4, 172, 4536, 1111, 17, 546, 38, 13, 447, 4, 192,
 50, 16, 6, 147, 2025, 19, 14, 22, 4, 1920, 4613, 469, 4, 22, 71, 87, 12, 16,
 43, 530, 38, 76, 15, 13, 1247, 4, 22, 17, 515, 17, 12, 16, 626, 18, 2, 5, 62,
 386, 12, 8, 316, 8, 106, 5, 4, 2223, 5244, 16, 480, 66, 3785, 33, 4, 130, 12,
 16, 38, 619, 5, 25, 124, 51, 36, 135, 48, 25, 1415, 33, 6, 22, 12, 215, 28,
 77, 52, 5, 14, 407, 16, 82, 2, 8, 4, 107, 117, 5952, 15, 256, 4, 2, 7, 3766,
 5, 723, 36, 71, 43, 530, 476, 26, 400, 317, 46, 7, 4, 2, 1029, 13, 104, 88, 4,
 381, 15, 297, 98, 32, 2071, 56, 26, 141, 6, 194, 7486, 18, 4, 226, 22, 21,
 134, 476, 26, 480, 5, 144, 30, 5535, 18, 51, 36, 28, 224, 92, 25, 104, 4, 226,
 65, 16, 38, 1334, 88, 12, 16, 283, 5, 16, 4472, 113, 103, 32, 15, 16, 5345,
 19, 178, 32]
In [5]: train_labels[0]

Out[5]: 1

In [6]: # Due to the 10,000 most frequent words restriction, no word index will exceed 10,000:
max([max(sequence) for sequence in train_data])

Out[6]: 9999
In [7]: # word_index is a dictionary mapping words to an integer index
word_index = imdb.get_word_index()

# We reverse it, mapping integer indices to words
reverse_word_index = dict([(value, key) for (key, value) in word_index.items()])

# We decode the review; note that our indices were offset by 3
# because 0, 1 and 2 are reserved indices for "padding", "start of sequence", and "unknown"
decoded_review = ' '.join([reverse_word_index.get(i - 3, '?') for i in train_data[0]])
decoded_review
"? this film was just brilliant casting location scenery story direction everyone's
really suited the part they played and you could just imagine being there robert ? i
s an amazing actor and now the same being director ? father came from the same scott
ish island as myself so i loved the fact there was a real connection with this film
the witty remarks throughout the film were great it was just brilliant so much that
i bought the film as soon as it was released for ? and would recommend it to everyon
e to watch and the fly fishing was amazing really cried at the end it was so sad and
you know what they say if you cry at a film it must have been good and this definite
ly was also ? to the two little boy's that played the ? of norman and paul they were
just brilliant children are often left out of the ? list i think because the stars t
hat play them all grown up are such a big profile for the whole film but these child
ren are amazing and should be praised for what they have done don't you think the wh
ole story was so lovely because it was true and was someone's life after all that wa
s shared with us all"
array([0., 1., 1., ..., 0., 0., 0.])
C:Userswaleeanaconda3libsite-packagestensorflowpythonkerasoptimizer_v2opti
mizer_v2.py:374: UserWarning: The `lr` argument is deprecated, use `learning_rate` i
nstead.

warnings.warn(

Out[7]:
In [8]: import numpy as np

def vectorize_sequences(sequences, dimension=10000):
    # Create an all-zeros matrix of shape (len(sequences), dimension)
    results = np.zeros((len(sequences), dimension))
    for i, sequence in enumerate(sequences):
        results[i, sequence] = 1.  # set specific indices of results[i] to 1s
    return results

# training data
x_train = vectorize_sequences(train_data)
# test data
x_test = vectorize_sequences(test_data)
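To make the multi-hot encoding concrete, here is a minimal sketch of what vectorize_sequences produces (the toy sequences and the dimension=6 vocabulary are invented for illustration, not taken from the notebook):

# Hypothetical toy example: two short "reviews" over a 6-word vocabulary.
demo = vectorize_sequences([[0, 3], [2, 2, 5]], dimension=6)
print(demo)
# [[1. 0. 0. 1. 0. 0.]
#  [0. 0. 1. 0. 0. 1.]]

Note that a repeated word index (the two 2s) still yields a single 1, so the encoding records word presence, not counts.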

In [9]: # sample train data
x_train[0]

Out[9]: array([0., 1., 1., ..., 0., 0., 0.])
In [10]: # Vectorized labels
y_train = np.asarray(train_labels).astype('float32')
y_test = np.asarray(test_labels).astype('float32')

In [11]: # The Keras implementation
from keras import models
from keras import layers

model = models.Sequential()
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
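For reference, the layer stack can be inspected with model.summary() (an optional step, not in the original notebook); with input_shape=(10000,), the first Dense layer alone holds 10000*16 + 16 = 160,016 parameters, which is why the multi-hot vectors dominate the model size.

model.summary()  # prints the three Dense layers and their parameter counts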

In [12]: model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])

In [13]: from keras import optimizers

model.compile(optimizer=optimizers.RMSprop(lr=0.001),
              loss='binary_crossentropy',
              metrics=['accuracy'])

from keras import losses
from keras import metrics

model.compile(optimizer=optimizers.RMSprop(lr=0.001),
              loss=losses.binary_crossentropy,
              metrics=[metrics.binary_accuracy])

C:\Users\walee\anaconda3\lib\site-packages\tensorflow\python\keras\optimizer_v2\optimizer_v2.py:374: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.
  warnings.warn(
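The UserWarning above is triggered by the deprecated `lr` keyword. A sketch of the same compile call using the documented replacement keyword, `learning_rate` (behaviourally identical, just warning-free):

model.compile(optimizer=optimizers.RMSprop(learning_rate=0.001),
              loss=losses.binary_crossentropy,
              metrics=[metrics.binary_accuracy])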

In [14]: x_val = x_train[:10000]
partial_x_train = x_train[10000:]
y_val = y_train[:10000]
partial_y_train = y_train[10000:]

In [15]: history = model.fit(partial_x_train,
                    partial_y_train,
                    epochs=20,
                    batch_size=512,
                    validation_data=(x_val, y_val))

Epoch 1/20
30/30 [==============================] - 63s 419ms/step - loss: 0.6034 - binary_accuracy: 0.6815 - val_loss: 0.3960 - val_binary_accuracy: 0.8684
Epoch 2/20
30/30 [==============================] - 1s 35ms/step - loss: 0.3324 - binary_accuracy: 0.9012 - val_loss: 0.3173 - val_binary_accuracy: 0.8795
Epoch 3/20
30/30 [==============================] - 1s 30ms/step - loss: 0.2365 - binary_accuracy: 0.9290 - val_loss: 0.2801 - val_binary_accuracy: 0.8919
Epoch 4/20
30/30 [==============================] - 1s 27ms/step - loss: 0.1827 - binary_accuracy: 0.9437 - val_loss: 0.2915 - val_binary_accuracy: 0.8844
Epoch 5/20
30/30 [==============================] - 1s 23ms/step - loss: 0.1464 - binary_accuracy: 0.9582 - val_loss: 0.2773 - val_binary_accuracy: 0.8912
Epoch 6/20
30/30 [==============================] - 1s 29ms/step - loss: 0.1130 - binary_accuracy: 0.9705 - val_loss: 0.2915 - val_binary_accuracy: 0.8887
Epoch 7/20
30/30 [==============================] - 1s 26ms/step - loss: 0.0957 - binary_accuracy: 0.9756 - val_loss: 0.3162 - val_binary_accuracy: 0.8774
Epoch 8/20
30/30 [==============================] - 1s 22ms/step - loss: 0.0800 - binary_accuracy: 0.9801 - val_loss: 0.3175 - val_binary_accuracy: 0.8851
Epoch 9/20
30/30 [==============================] - 1s 22ms/step - loss: 0.0627 - binary_accuracy: 0.9852 - val_loss: 0.3557 - val_binary_accuracy: 0.8737
Epoch 10/20
30/30 [==============================] - 1s 22ms/step - loss: 0.0510 - binary_accuracy: 0.9884 - val_loss: 0.3607 - val_binary_accuracy: 0.8816
Epoch 11/20
30/30 [==============================] - 1s 22ms/step - loss: 0.0378 - binary_accuracy: 0.9934 - val_loss: 0.4033 - val_binary_accuracy: 0.8749
Epoch 12/20
30/30 [==============================] - 1s 23ms/step - loss: 0.0323 - binary_accuracy: 0.9940 - val_loss: 0.4143 - val_binary_accuracy: 0.8803
Epoch 13/20
30/30 [==============================] - 1s 23ms/step - loss: 0.0234 - binary_accuracy: 0.9970 - val_loss: 0.4700 - val_binary_accuracy: 0.8665
Epoch 14/20
30/30 [==============================] - 1s 22ms/step - loss: 0.0174 - binary_accuracy: 0.9984 - val_loss: 0.4743 - val_binary_accuracy: 0.8756
Epoch 15/20
30/30 [==============================] - 1s 24ms/step - loss: 0.0116 - binary_accuracy: 0.9995 - val_loss: 0.5077 - val_binary_accuracy: 0.8741
Epoch 16/20
30/30 [==============================] - 1s 25ms/step - loss: 0.0088 - binary_accuracy: 0.9996 - val_loss: 0.5462 - val_binary_accuracy: 0.8709
Epoch 17/20
30/30 [==============================] - 1s 29ms/step - loss: 0.0076 - binary_accuracy: 0.9993 - val_loss: 0.5741 - val_binary_accuracy: 0.8708
Epoch 18/20
30/30 [==============================] - 1s 28ms/step - loss: 0.0041 - binary_accuracy: 1.0000 - val_loss: 0.6180 - val_binary_accuracy: 0.8657
Epoch 19/20
30/30 [==============================] - 1s 27ms/step - loss: 0.0063 - binary_accuracy: 0.9989 - val_loss: 0.6356 - val_binary_accuracy: 0.8692
Epoch 20/20
30/30 [==============================] - 1s 34ms/step - loss: 0.0025 - binary_accuracy: 0.9999 - val_loss: 0.6637 - val_binary_accuracy: 0.8687
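As an aside, fit can carve out the validation set itself. A sketch of an alternative to the manual slicing in In [14] (note that validation_split holds out the *last* fraction of the training data rather than the first 10,000 samples, so the numbers would differ slightly):

# Hypothetical alternative to the manual split above, not what the notebook ran:
history_alt = model.fit(x_train, y_train, epochs=20, batch_size=512,
                        validation_split=0.4)  # holds out the last 10,000 of 25,000 samples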

In [16]: history_dict = history.history
history_dict.keys()
print(history_dict)

{'loss': [0.52616947889328, 0.31405124068260193, 0.23073254525661469, 0.18089142441749573, 0.15155072510242462, 0.11920173466205597, 0.09928888082504272, 0.08016802370548248, 0.06698485463857651, 0.05347074940800667, 0.04215071722865105, 0.03208116069436073, 0.024609368294477463, 0.020660480484366417, 0.012686074711382389, 0.009793567471206188, 0.008989308960735798, 0.004304599016904831, 0.008949910290539265, 0.002322724089026451],
 'binary_accuracy': [0.7734000086784363, 0.902733325958252, 0.9275333285331726, 0.9417999982833862, 0.9536666870117188, 0.9657999873161316, 0.972266674041748, 0.9791333079338074, 0.9835333228111267, 0.9874666929244995, 0.9911333322525024, 0.9941333532333374, 0.9962666630744934, 0.9971333146095276, 0.9986666440963745, 0.9995999932289124, 0.9989333152770996, 0.9998666644096375, 0.9980666637420654, 0.9999333620071411],
 'val_loss': [0.3960328698158264, 0.3172730803489685, 0.2800559401512146, 0.29149702191352844, 0.27728235721588135, 0.2915444076061249, 0.3162406086921692, 0.31750768423080444, 0.3556799292564392, 0.3607264757156372, 0.4032689929008484, 0.4143247604370117, 0.47000113129615784, 0.4742533564567566, 0.5077142715454102, 0.5462456345558167, 0.5740538835525513, 0.6180163621902466, 0.6355932950973511, 0.6637150645256042],
 'val_binary_accuracy': [0.868399977684021, 0.8794999718666077, 0.8919000029563904, 0.8844000101089478, 0.8912000060081482, 0.888700008392334, 0.8773999810218811, 0.8851000070571899, 0.8737000226974487, 0.881600022315979, 0.8748999834060669, 0.880299985408783, 0.8665000200271606, 0.8755999803543091, 0.8741000294685364, 0.8708999752998352, 0.8708000183105469, 0.8657000064849854, 0.8691999912261963, 0.8687000274658203]}
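The val_loss list shows the classic overfitting pattern: it bottoms out early and climbs for the rest of training. A quick sketch to read the best epoch off the history dict (np.argmin returns a 0-based index, hence the +1):

best_epoch = int(np.argmin(history_dict['val_loss'])) + 1
print(best_epoch, min(history_dict['val_loss']))
# 5 0.27728235721588135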

In [17]: import matplotlib.pyplot as plt

acc = history.history['binary_accuracy']
val_acc = history.history['val_binary_accuracy']
#acc = history.history['acc']
#val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(1, len(acc) + 1)

# "bo" is for "blue dot"
plt.plot(epochs, loss, 'bo', label='Training loss')
# "b" is for "solid blue line"
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()

In [18]: plt.clf()  # clear figure
#acc_values = history_dict['acc']
#val_acc_values = history_dict['val_acc']
acc = history.history['binary_accuracy']
val_acc = history.history['val_binary_accuracy']

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

In [19]: model = models.Sequential()
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))

model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])

model.fit(x_train, y_train, epochs=4, batch_size=512)
results = model.evaluate(x_test, y_test)

Epoch 1/4
49/49 [==============================] - 4s 22ms/step - loss: 0.5358 - accuracy: 0.7571
Epoch 2/4
49/49 [==============================] - 1s 16ms/step - loss: 0.2672 - accuracy: 0.9079
Epoch 3/4
49/49 [==============================] - 1s 17ms/step - loss: 0.1949 - accuracy: 0.9321
Epoch 4/4
49/49 [==============================] - 1s 16ms/step - loss: 0.1659 - accuracy: 0.9411
782/782 [==============================] - 8s 2ms/step - loss: 0.3059 - accuracy: 0.8796

In [20]: results

Out[20]: [0.3059064447879791, 0.8795599937438965]
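Retraining for a hand-picked 4 epochs is one way to stop before overfitting; an alternative (not used in this notebook) is an EarlyStopping callback that watches val_loss and keeps the best weights. A sketch:

from tensorflow.keras.callbacks import EarlyStopping

early_stop = EarlyStopping(monitor='val_loss', patience=2, restore_best_weights=True)
model.fit(partial_x_train, partial_y_train, epochs=20, batch_size=512,
          validation_data=(x_val, y_val), callbacks=[early_stop])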
In [21]: model.predict(x_test)

Out[21]:
array([[0.22144157],
       [0.999939  ],
       [0.9658772 ],
       ...,
       [0.16461223],
       [0.10813493],
       [0.617234  ]], dtype=float32)
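The predictions are per-review probabilities of the positive class. To turn them into hard 0/1 labels, one can threshold at 0.5, a minimal sketch:

predictions = model.predict(x_test)
predicted_labels = (predictions > 0.5).astype('float32')  # 1.0 = positive review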
In [ ]:
