import tensorflow as tf
from tensorflow import keras
import numpy as np

# Small binary classifier over 26-dim letter-presence vectors:
# 1 = "looks English", 0 = "not English".
model = keras.Sequential([
    keras.layers.Dense(128, input_shape=(26,), activation='relu'),
    keras.layers.Dense(64, activation='relu'),
    keras.layers.Dense(1, activation='sigmoid'),
])

model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['accuracy'])

# Tiny toy training set (five words per class).
english_words = ['hello', 'world', 'goodbye', 'python', 'programming']
non_english_words = ['hola', 'adios', 'merci', 'gracias', 'danke']


def word_to_one_hot(word):
    """Encode *word* as a 26-dim letter-presence vector (indices a..z).

    Each position is set to 1 if the corresponding lowercase letter
    occurs anywhere in the word; letter counts and order are discarded.
    Characters outside a-z (accents, hyphens, digits, apostrophes) are
    skipped — the unguarded ``ord(letter) - 97`` previously raised
    IndexError on such input (e.g. "it's", "naïve").
    """
    one_hot = np.zeros((26,))
    for letter in word.lower():
        idx = ord(letter) - ord('a')
        if 0 <= idx < 26:  # guard: ignore non a-z characters
            one_hot[idx] = 1
    return one_hot


english_words_one_hot = np.array([word_to_one_hot(w) for w in english_words])
non_english_words_one_hot = np.array([word_to_one_hot(w) for w in non_english_words])

# Labels: ones for the English class, zeros for the non-English class.
english_labels = np.ones((len(english_words),))
non_english_labels = np.zeros((len(non_english_words),))

# Stack both classes into a single training set.
x_train = np.concatenate((english_words_one_hot, non_english_words_one_hot))
y_train = np.concatenate((english_labels, non_english_labels))

model.fit(x_train, y_train, epochs=50, batch_size=2)

# Sanity-check the model on a few new words.
test_words = ['hello', 'world', 'bonjour', 'merci', 'python']
test_words_one_hot = np.array([word_to_one_hot(w) for w in test_words])
predictions = model.predict(test_words_one_hot)

for word, prediction in zip(test_words, predictions):
    # predict() returns shape (n, 1); threshold the sigmoid output at 0.5.
    if prediction[0] > 0.5:
        print(word, 'is English')
    else:
        print(word, 'is not English')
1 2 10 42 11
Epoch 1/50
2/10 [=====>........................] - ETA: 1s - loss: 0.5744 - acc: 1.0000
10/10 [==============================] - 0s 49ms/sample - loss: 0.7265 - acc: 0.3000
Epoch 2/50
2/10 [=====>........................] - ETA: 0s - loss: 0.7413 - acc: 0.0000e+00
10/10 [==============================] - 0s 568us/sample - loss: 0.6666 - acc: 0.5000
Epoch 3/50
2/10 [=====>........................] - ETA: 0s - loss: 0.5506 - acc: 1.0000
10/10 [==============================] - 0s 565us/sample - loss: 0.6127 - acc: 0.9000
Epoch 4/50
2/10 [=====>........................] - ETA: 0s - loss: 0.5623 - acc: 1.0000
10/10 [==============================] - 0s 558us/sample - loss: 0.5716 - acc: 1.0000
Epoch 5/50
2/10 [=====>........................] - ETA: 0s - loss: 0.5656 - acc: 1.0000
10/10 [==============================] - 0s 558us/sample - loss: 0.5309 - acc: 1.0000
Epoch 6/50
2/10 [=====>........................] - ETA: 0s - loss: 0.4446 - acc: 1.0000
10/10 [==============================] - 0s 559us/sample - loss: 0.4982 - acc: 1.0000
Epoch 7/50
2/10 [=====>........................] - ETA: 0s - loss: 0.3537 - acc: 1.0000
10/10 [==============================] - 0s 559us/sample - loss: 0.4602 - acc: 1.0000
Epoch 8/50
2/10 [=====>........................] - ETA: 0s - loss: 0.3103 - acc: 1.0000
10/10 [==============================] - 0s 559us/sample - loss: 0.4216 - acc: 1.0000
Epoch 9/50
2/10 [=====>........................] - ETA: 0s - loss: 0.4057 - acc: 1.0000
10/10 [==============================] - 0s 561us/sample - loss: 0.3850 - acc: 1.0000
Epoch 10/50
2/10 [=====>........................] - ETA: 0s - loss: 0.2675 - acc: 1.0000
10/10 [==============================] - 0s 558us/sample - loss: 0.3475 - acc: 1.0000
Epoch 11/50
2/10 [=====>........................] - ETA: 0s - loss: 0.3733 - acc: 1.0000
10/10 [==============================] - 0s 561us/sample - loss: 0.3122 - acc: 1.0000
Epoch 12/50
2/10 [=====>........................] - ETA: 0s - loss: 0.2847 - acc: 1.0000
10/10 [==============================] - 0s 565us/sample - loss: 0.2770 - acc: 1.0000
Epoch 13/50
2/10 [=====>........................] - ETA: 0s - loss: 0.2578 - acc: 1.0000
10/10 [==============================] - 0s 560us/sample - loss: 0.2459 - acc: 1.0000
Epoch 14/50
2/10 [=====>........................] - ETA: 0s - loss: 0.1861 - acc: 1.0000
10/10 [==============================] - 0s 557us/sample - loss: 0.2111 - acc: 1.0000
Epoch 15/50
2/10 [=====>........................] - ETA: 0s - loss: 0.2205 - acc: 1.0000
10/10 [==============================] - 0s 558us/sample - loss: 0.1814 - acc: 1.0000
Epoch 16/50
2/10 [=====>........................] - ETA: 0s - loss: 0.1684 - acc: 1.0000
10/10 [==============================] - 0s 557us/sample - loss: 0.1568 - acc: 1.0000
Epoch 17/50
2/10 [=====>........................] - ETA: 0s - loss: 0.1407 - acc: 1.0000
10/10 [==============================] - 0s 565us/sample - loss: 0.1369 - acc: 1.0000
Epoch 18/50
2/10 [=====>........................] - ETA: 0s - loss: 0.1222 - acc: 1.0000
10/10 [==============================] - 0s 554us/sample - loss: 0.1161 - acc: 1.0000
Epoch 19/50
2/10 [=====>........................] - ETA: 0s - loss: 0.1137 - acc: 1.0000
10/10 [==============================] - 0s 572us/sample - loss: 0.0981 - acc: 1.0000
Epoch 20/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0792 - acc: 1.0000
10/10 [==============================] - 0s 562us/sample - loss: 0.0845 - acc: 1.0000
Epoch 21/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0754 - acc: 1.0000
10/10 [==============================] - 0s 558us/sample - loss: 0.0720 - acc: 1.0000
Epoch 22/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0633 - acc: 1.0000
10/10 [==============================] - 0s 557us/sample - loss: 0.0630 - acc: 1.0000
Epoch 23/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0863 - acc: 1.0000
10/10 [==============================] - 0s 558us/sample - loss: 0.0544 - acc: 1.0000
Epoch 24/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0191 - acc: 1.0000
10/10 [==============================] - 0s 562us/sample - loss: 0.0475 - acc: 1.0000
Epoch 25/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0639 - acc: 1.0000
10/10 [==============================] - 0s 556us/sample - loss: 0.0422 - acc: 1.0000
Epoch 26/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0195 - acc: 1.0000
10/10 [==============================] - 0s 577us/sample - loss: 0.0367 - acc: 1.0000
Epoch 27/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0677 - acc: 1.0000
10/10 [==============================] - 0s 583us/sample - loss: 0.0333 - acc: 1.0000
Epoch 28/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0161 - acc: 1.0000
10/10 [==============================] - 0s 561us/sample - loss: 0.0291 - acc: 1.0000
Epoch 29/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0455 - acc: 1.0000
10/10 [==============================] - 0s 559us/sample - loss: 0.0263 - acc: 1.0000
Epoch 30/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0644 - acc: 1.0000
10/10 [==============================] - 0s 556us/sample - loss: 0.0238 - acc: 1.0000
Epoch 31/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0130 - acc: 1.0000
10/10 [==============================] - 0s 560us/sample - loss: 0.0213 - acc: 1.0000
Epoch 32/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0285 - acc: 1.0000
10/10 [==============================] - 0s 561us/sample - loss: 0.0196 - acc: 1.0000
Epoch 33/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0160 - acc: 1.0000
10/10 [==============================] - 0s 559us/sample - loss: 0.0180 - acc: 1.0000
Epoch 34/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0454 - acc: 1.0000
10/10 [==============================] - 0s 566us/sample - loss: 0.0162 - acc: 1.0000
Epoch 35/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0079 - acc: 1.0000
10/10 [==============================] - 0s 561us/sample - loss: 0.0147 - acc: 1.0000
Epoch 36/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0061 - acc: 1.0000
10/10 [==============================] - 0s 563us/sample - loss: 0.0134 - acc: 1.0000
Epoch 37/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0243 - acc: 1.0000
10/10 [==============================] - 0s 557us/sample - loss: 0.0126 - acc: 1.0000
Epoch 38/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0050 - acc: 1.0000
10/10 [==============================] - 0s 559us/sample - loss: 0.0116 - acc: 1.0000
Epoch 39/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0042 - acc: 1.0000
10/10 [==============================] - 0s 558us/sample - loss: 0.0109 - acc: 1.0000
Epoch 40/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0081 - acc: 1.0000
10/10 [==============================] - 0s 558us/sample - loss: 0.0101 - acc: 1.0000
Epoch 41/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0105 - acc: 1.0000
10/10 [==============================] - 0s 562us/sample - loss: 0.0093 - acc: 1.0000
Epoch 42/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0164 - acc: 1.0000
10/10 [==============================] - 0s 556us/sample - loss: 0.0088 - acc: 1.0000
Epoch 43/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0066 - acc: 1.0000
10/10 [==============================] - 0s 573us/sample - loss: 0.0082 - acc: 1.0000
Epoch 44/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0061 - acc: 1.0000
10/10 [==============================] - 0s 567us/sample - loss: 0.0077 - acc: 1.0000
Epoch 45/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0036 - acc: 1.0000
10/10 [==============================] - 0s 558us/sample - loss: 0.0072 - acc: 1.0000
Epoch 46/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0095 - acc: 1.0000
10/10 [==============================] - 0s 564us/sample - loss: 0.0068 - acc: 1.0000
Epoch 47/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0042 - acc: 1.0000
10/10 [==============================] - 0s 677us/sample - loss: 0.0064 - acc: 1.0000
Epoch 48/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0183 - acc: 1.0000
10/10 [==============================] - 0s 572us/sample - loss: 0.0061 - acc: 1.0000
Epoch 49/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0041 - acc: 1.0000
10/10 [==============================] - 0s 567us/sample - loss: 0.0057 - acc: 1.0000
Epoch 50/50
2/10 [=====>........................] - ETA: 0s - loss: 0.0027 - acc: 1.0000
10/10 [==============================] - 0s 556us/sample - loss: 0.0054 - acc: 1.0000
('hello', 'is English')
('world', 'is English')
('bonjour', 'is English')
('merci', 'is not English')
('python', 'is English')
WARNING:tensorflow:From /usr/local/lib/python2.7/dist-packages/tensorflow/python/ops/resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.
WARNING:tensorflow:From /usr/local/lib/python2.7/dist-packages/tensorflow/python/ops/math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.
Instructions for updating:
Use tf.cast instead.